Example #1
0
def main(argv):
    """Train a dense MNIST classifier, saving plots, graph JSON and per-epoch checkpoints.

    Args:
        argv: command-line arguments; argv[0] (the script path) is used to
            derive the project name and the output directories.
    """
    batch_size = 200
    epochs = 50

    # Load MNIST dataset
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # Convert images from [0,255] to [0,1]
    x_train = x_train.astype('float32') / 255.0
    x_test = x_test.astype('float32') / 255.0

    # Extract input dimension and distinct output classes from dataset
    input_shape = x_train.shape[1:]
    num_classes = len(np.unique(y_train))
    num_train_samples = x_train.shape[0]

    # Convert y to categorical one-hot vectors
    y_train = to_categorical(y_train, num_classes=num_classes)
    y_test = to_categorical(y_test, num_classes=num_classes)

    # Create and compile model
    model = mnist_dense_model(input_shape=input_shape,
                              n_classes=num_classes,
                              dropout=0.5,
                              model_name=get_project_name(argv[0]))

    model.compile(loss=categorical_crossentropy,
                  optimizer=Adam(),
                  metrics=['accuracy'])

    # Print summary and save model as plot and node-link-graph
    project_paths = get_project_paths(argv[0], to_tmp=False)

    save_graph_plot(model, project_paths["plots"] + "/model.ps")
    save_graph_json(model, project_paths["graphs"] + "/model.json")

    # Train model while saving weights as checkpoints after each epoch.
    # steps_per_epoch must be an integer; floor division keeps it one
    # (true division produced a float here).
    model.fit(
        x_train,
        y_train,
        steps_per_epoch=num_train_samples //
        (batch_size * 5),  # 5 epochs per full dataset rotation
        batch_size=batch_size,
        epochs=epochs,
        verbose=1,
        callbacks=[
            ModelCheckpoint(project_paths["checkpoints"] +
                            "/weights_epoch-{epoch:02d}.hdf5",
                            save_weights_only=True,
                            save_freq='epoch')
        ],
        validation_data=(x_test, y_test))
Example #2
0
        keras.Input(shape=input_shape),
        layers.Conv2D(2, kernel_size=(3, 3), activation="relu"),
        layers.MaxPooling2D(pool_size=(2, 2)),
        layers.Conv2D(4, kernel_size=(3, 3), activation="relu"),
        layers.MaxPooling2D(pool_size=(2, 2)),
        layers.Flatten(),
        layers.Dropout(0.5),
        layers.Dense(num_classes, activation="softmax"),
    ])
    return model


# Build the regularized model, report its architecture, and persist it both
# as a rendered plot (.ps) and as a node-link JSON graph.
model1 = create_model()
model1.summary()
save_graph_plot(model1, project_paths["plots"] + "/reg_model.ps")
# NOTE(review): the JSON graph is written under the "weights" directory here,
# while sibling scripts write it under "plots"/"graphs" — confirm intended.
save_graph_json(model1, project_paths["weights"] + "/reg_model.json")
# Timestamped TensorBoard log dir so successive runs don't overwrite each other.
date_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
log_dir = project_paths["tb"] + "/reg/" + date_time
tensorboard_callback1 = tf.keras.callbacks.TensorBoard(log_dir=log_dir,
                                                       histogram_freq=1)
# Per-epoch checkpoint path template ({epoch} is filled in by Keras).
checkpoint_path = project_paths["checkpoints"] + "/weights_epoch-{epoch}.ckpt"
checkpoint_dir = os.path.dirname(checkpoint_path)
"""
## Train the model
"""

batch_size = 128

# Categorical cross-entropy implies one-hot labels — presumably the labels
# are one-hot encoded upstream; verify against the data-prep code.
model1.compile(loss="categorical_crossentropy",
               optimizer="adam",
               metrics=["accuracy"])
Example #3
0
def create_model():
    """Build and compile a small dense classifier for 28x28 inputs.

    Returns:
        A compiled Sequential model: Flatten -> Dense(2, relu) -> Dense(10)
        raw logits, trained with Adam and sparse categorical cross-entropy
        (from_logits=True).
    """
    net = models.Sequential([
        layers.Flatten(input_shape=(28, 28)),
        layers.Dense(2, activation='relu'),
        layers.Dense(10),
    ])
    net.compile(
        optimizer='adam',
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=['accuracy'])
    return net


# Regular (baseline) model: build it and persist its structure.
reg_model = create_model()
save_graph_plot(reg_model, project_paths["plots"] + "/reg_model.ps")
save_graph_json(reg_model, project_paths["plots"] + "/reg_model.json")

# Train, logging metrics and saving the model/weights via the callbacks
# (csv_logger, callback_save_model_reg, callback_weights_reg come from
# earlier in the file, outside this view).
reg_model_hist = reg_model.fit(
    train_images,
    train_labels,
    epochs=epochs,
    validation_data=(test_images, test_labels),
    callbacks=[csv_logger, callback_save_model_reg, callback_weights_reg])

# Keep the training history and a display name for later comparison.
model_list.append(reg_model_hist)
model_name_list.append("Regular model ")

# Pred Model
# pred_model =  create_model()
# Restore a previously trained model from disk instead of training anew.
pred_model = tf.keras.models.load_model(restore_path)
Example #4
0
# Convolutional feature extractor (continues layers added above this view)
# followed by a dense head emitting raw logits (no softmax).
model.add(tf.keras.layers.MaxPooling2D((2, 2)))
model.add(tf.keras.layers.Conv2D(128, (3, 3), activation='relu'))
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2)))
model.add(layers.Flatten())
model.add(layers.Dense(256, activation='relu'))
model.add(layers.Dense(128, activation='relu'))
model.add(layers.Dense(num_classes))
model.summary()
# from_logits=True pairs with the un-activated final Dense layer above.
model.compile(
    optimizer='adam',
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    metrics=['accuracy'])

project_paths = get_project_paths(sys.argv[0], to_tmp=False)
save_graph_plot(model, project_paths["plots"] + "/model.ps")
save_graph_json(model, project_paths["weights"] + "/model.json")

# Timestamped TensorBoard log dir so repeated runs don't collide.
log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir,
                                                      histogram_freq=1)

history = model.fit(
    train_images,
    train_labels,
    # NOTE(review): true division yields a float, but Keras expects an
    # integer steps_per_epoch — consider // here.
    steps_per_epoch=num_train_samples /
    batch_size,  # one full pass over the dataset per epoch
    epochs=epochs,
    batch_size=batch_size,
    #validation_split=validation_split,
    validation_data=(test_images, test_labels),
    callbacks=[