# Example #1
# 0
def main():
    """Train a PointNet classifier on ModelNet40 and save weights/history.

    Relies on module-level helpers: DataGenerator, PointNet, Adam,
    ModelCheckpoint, onetenth_50_75, plot_history, save_history.
    Side effects: creates ./results/ and writes checkpoint, plots,
    history and final weights there.
    """
    nb_classes = 40
    train_file = './ModelNet40/ply_data_train.h5'
    test_file = './ModelNet40/ply_data_test.h5'

    # ModelNet40 split sizes used to derive steps per epoch.
    # NOTE(review): assumes the h5 files contain exactly this many samples
    # — verify against the actual files.
    train_num_samples = 9840
    test_num_samples = 2468

    epochs = 100
    batch_size = 32

    train = DataGenerator(train_file, batch_size, nb_classes, train=True)
    val = DataGenerator(test_file, batch_size, nb_classes, train=False)

    model = PointNet(nb_classes)
    model.summary()

    lr = 0.0001
    adam = Adam(lr=lr)
    model.compile(optimizer=adam,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    if not os.path.exists('./results/'):
        os.mkdir('./results/')

    # Keep only the best weights as measured by validation accuracy.
    checkpoint = ModelCheckpoint('./results/pointnet.h5', monitor='val_acc',
                                 save_weights_only=True, save_best_only=True,
                                 verbose=1)

    history = model.fit_generator(train.generator(),
                                  steps_per_epoch=train_num_samples // batch_size,
                                  epochs=epochs,
                                  validation_data=val.generator(),
                                  validation_steps=test_num_samples // batch_size,
                                  callbacks=[checkpoint, onetenth_50_75(lr)],
                                  verbose=1)

    plot_history(history, './results/')
    save_history(history, './results/')
    model.save_weights('./results/pointnet_weights.h5')
# Example #2
# 0
def main():
    """Train pointnet2 on ModelNet40, resuming from the last saved epoch.

    Uses module-level helpers: DataGenerator, pointnet2, get_last_status,
    MetaCheckpoint, onetenth_50_75, plot_history, save_history.
    """
    nb_classes = 40
    batch_size = 32
    epochs = 100
    lr = 0.0001
    results_dir = './results/'

    train_file = './ModelNet40/ply_data_train.h5'
    test_file = './ModelNet40/ply_data_test.h5'

    # Full ModelNet40 split sizes, kept for reference:
    # train_num_points = 9840
    # test_num_points = 2468
    # The reduced counts below are for development purposes only.
    train_num_points = 1648
    test_num_points = 420

    train = DataGenerator(train_file, batch_size, nb_classes, train=True)
    val = DataGenerator(test_file, batch_size, nb_classes, train=False)

    model = pointnet2(nb_classes)
    model.summary()
    model.compile(optimizer=Adam(lr=lr),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    if not os.path.exists(results_dir):
        os.mkdir(results_dir)

    # Resume bookkeeping: last completed epoch plus checkpoint metadata.
    last_epoch, last_meta = get_last_status(model)

    checkpoint = MetaCheckpoint('./results/pointnet.h5',
                                monitor='val_acc',
                                save_weights_only=True,
                                save_best_only=True,
                                verbose=1,
                                meta=last_meta)

    history = model.fit_generator(
        train.generator(),
        steps_per_epoch=train_num_points // batch_size,
        epochs=epochs,
        validation_data=val.generator(),
        validation_steps=test_num_points // batch_size,
        callbacks=[checkpoint, onetenth_50_75(lr)],
        verbose=1,
        initial_epoch=last_epoch + 1)

    plot_history(history, results_dir)
    save_history(history, results_dir)
    model.save_weights('./results/pointnet_weights.h5')
# Example #3
# 0
def main():
    """Train the point_mask model on ModelNet40 and save training history.

    Sample counts are read from the 'data' datasets of the HDF5 files so
    steps per epoch track the actual dataset size. Relies on module-level
    helpers: DataGenerator, point_mask, Adam, ModelCheckpoint,
    onetenth_50_75, plot_history, save_history, h5py.
    """
    nb_classes = 40
    train_file = 'PATH/ModelNet40/ply_data_train.h5'
    val_file = 'PATH/ModelNet40/ply_data_test.h5'

    # Open read-only and close promptly: the original left both HDF5
    # handles open for the entire run (resource leak).
    with h5py.File(train_file, mode='r') as f:
        num_samples_train = len(f['data'])
    with h5py.File(val_file, mode='r') as f:
        num_samples_val = len(f['data'])

    epochs = 500
    batch_size = 32

    train = DataGenerator(train_file, batch_size, nb_classes, train=True)
    val = DataGenerator(val_file, batch_size, nb_classes, train=False)

    model = point_mask(nb_classes)
    model.summary()

    lr = 0.001
    adam = Adam(lr=lr)
    model.compile(optimizer=adam,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    if not os.path.exists('PATH/results/'):
        os.mkdir('PATH/results/')

    # Keep only the best weights by validation accuracy (mode='max').
    checkpoint = ModelCheckpoint('PATH/results/pointmask.h5',
                                 monitor='val_acc',
                                 mode='max',
                                 save_weights_only=True,
                                 save_best_only=True,
                                 verbose=1)

    history = model.fit_generator(
        train.generator(),
        steps_per_epoch=num_samples_train // batch_size,
        validation_data=val.generator(),
        validation_steps=num_samples_val // batch_size,
        epochs=epochs,
        callbacks=[checkpoint, onetenth_50_75(lr)],
        verbose=1)

    plot_history(history, 'PATH/results/')
    save_history(history, 'PATH/results/')
# Top-level training script fragment (Example 4).
# NOTE(review): this fragment references `nb_classes` (in the DataGenerator
# calls) and `model` (summary/compile/fit) without defining them — presumably
# they are defined earlier in the original file; verify before running.
train_file = '/home/changetest/datasets/Modelnet40/ply_data_train.h5'
test_file = '/home/changetest/datasets/Modelnet40/ply_data_test.h5'

epochs = 100
batch_size = 32

# Generators over the ModelNet40 HDF5 splits.
train = DataGenerator(train_file, batch_size, nb_classes, train=True)
val = DataGenerator(test_file, batch_size, nb_classes, train=False)

model.summary()
lr = 0.0001
adam = Adam(lr=lr)
model.compile(optimizer=adam,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

if not os.path.exists('./results/'):
    os.mkdir('./results/')
# Save only the best weights as measured by validation accuracy.
checkpoint = ModelCheckpoint('./results/pointnet.h5',
                             monitor='val_acc',
                             save_weights_only=True,
                             save_best_only=True,
                             verbose=1)
# Steps are hard-coded from the ModelNet40 split sizes (9840 train /
# 2468 test) — TODO confirm they match the files on this machine.
model.fit_generator(train.generator(),
                    steps_per_epoch=9840 // batch_size,
                    epochs=epochs,
                    validation_data=val.generator(),
                    callbacks=[checkpoint, onetenth_50_75(lr)],
                    validation_steps=2468 // batch_size,
                    verbose=1)
def main(model_name="pointnet", training_name="default"):
    """Train a point-cloud classifier on ModelNet40 and save its artifacts.

    The original body read ``model_name`` and ``training_name`` as globals
    that are never defined (the argv parsing that set ``model_name`` was
    commented out), so calling ``main()`` raised NameError. Both are now
    keyword parameters with defaults, keeping the zero-argument call working.

    Args:
        model_name: architecture to build; "pointnet" or "gapnet".
        training_name: name of this run's subdirectory under logs/<model_name>/.

    Raises:
        ValueError: if ``model_name`` is not a known architecture.
    """
    # Data preparation.
    nb_classes = 40
    train_file = './ModelNet40/ply_data_train.h5'
    test_file = './ModelNet40/ply_data_test.h5'

    # Hyperparameters.
    number_of_points = 1024
    epochs = 100
    batch_size = 32

    # Data generators for training and validation.
    train = DataGenerator(train_file,
                          batch_size,
                          number_of_points,
                          nb_classes,
                          train=True)
    val = DataGenerator(test_file,
                        batch_size,
                        number_of_points,
                        nb_classes,
                        train=False)

    # Create the model.
    if model_name == "pointnet":
        model = create_pointnet(number_of_points, nb_classes)
    elif model_name == "gapnet":
        model = GAPNet()
    else:
        # Previously `model` would be unbound here; fail fast instead.
        raise ValueError("Unknown model name: " + model_name)
    model.summary()

    # Ensure output paths: logs/<model_name>/<training_name>, wiping any
    # previous run that used the same training name.
    output_path = "logs"
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    output_path = os.path.join(output_path, model_name)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    output_path = os.path.join(output_path, training_name)
    if os.path.exists(output_path):
        shutil.rmtree(output_path)
    os.mkdir(output_path)

    # Compile the model.
    lr = 0.0001
    adam = Adam(lr=lr)
    model.compile(optimizer=adam,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    # Checkpoint callback (currently disabled — see `callbacks` below).
    checkpoint = ModelCheckpoint(os.path.join(output_path, "model.h5"),
                                 monitor="val_acc",
                                 save_weights_only=True,
                                 save_best_only=True,
                                 verbose=1)

    # Logging training progress with tensorboard.
    tensorboard_callback = tf.keras.callbacks.TensorBoard(
        log_dir=output_path,
        histogram_freq=0,
        batch_size=32,
        write_graph=True,
        write_grads=False,
        write_images=True,
        embeddings_freq=0,
        embeddings_layer_names=None,
        embeddings_metadata=None,
        embeddings_data=None,
        update_freq="epoch")

    callbacks = []
    # NOTE(review): best-weights checkpointing is deliberately disabled;
    # append `checkpoint` here to re-enable it.
    #callbacks.append(checkpoint)
    callbacks.append(onetenth_50_75(lr))
    callbacks.append(tensorboard_callback)

    # Train the model. ModelNet40 split sizes (9840 train / 2468 test) are
    # hard-coded — TODO confirm they match the generators' actual contents.
    history = model.fit_generator(train.generator(),
                                  steps_per_epoch=9840 // batch_size,
                                  epochs=epochs,
                                  validation_data=val.generator(),
                                  validation_steps=2468 // batch_size,
                                  callbacks=callbacks,
                                  verbose=1)

    # Save history and model.
    plot_history(history, output_path)
    save_history(history, output_path)
    model.save_weights(os.path.join(output_path, "model_weights.h5"))