Example #1
def loadModel(config, modelPath):
    model = RFCN_Model(mode="inference",
                       config=config,
                       model_dir=os.path.join(ROOT_DIR, "logs"))
    if not modelPath:
        modelPath = model.find_last()[1]
    print("Loading weights from: {}".format(modelPath))
    if modelPath and os.path.isfile(modelPath):
        # Load trained weights
        model.load_weights(modelPath, by_name=True)
    else:
        raise AssertionError("Model weight file does not exist")
    return model
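
A minimal usage sketch for loadModel; the Config instance, the weight path, and the detect() call below are illustrative assumptions following the Mask R-CNN-style API, not something shown in the example above.

import skimage.io

# Hypothetical usage: build a config and load a specific checkpoint (path is illustrative)
config = Config()
model = loadModel(config, "logs/rfcn_weights.h5")

# detect() is assumed to exist on the returned model (Mask R-CNN-style inference API)
image = skimage.io.imread("sample.jpg")
results = model.detect([image], verbose=1)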
Example #2
    # Validation dataset
    dataset_val = FashionDataset()
    dataset_val.initDB(5000, start=100000)
    dataset_val.prepare()

    model = RFCN_Model(mode="training",
                       config=config,
                       model_dir=os.path.join(ROOT_DIR, "logs"))

    # This is a hack, because the pre-trained weights do not fit the dilated ResNet
    # model.model.load_weights(
    #     "resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5", by_name=True, skip_mismatch=True)

    try:
        model_path = model.find_last()[1]
        if model_path is not None:
            model.load_weights(model_path, by_name=True)
    except Exception as e:
        print(e)
        print("No checkpoint founded")

    # *** This training schedule is an example. Update to your needs ***

    # Training - Stage 1
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=20,
                layers='heads')
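
Only Stage 1 is shown in this example; later stages of a schedule like the one in Example #3 could continue along these lines (the epoch counts and layer selections below are illustrative, not part of this example):

    # Training - Stage 2 (illustrative continuation)
    # Fine-tune ResNet stage 4 and up
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=40,
                layers='4+')

    # Training - Stage 3 (illustrative continuation)
    # Fine-tune all layers
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=80,
                layers='all')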
Example #3
def main():
    # tensorflow.compat.v1.disable_resource_variables()
    # with tensorflow.compat.v1.Session() as ses:
    # ses = tensorflow.compat.v1.Session()
    # tensorflow.debugging.set_log_device_placement(True)
    ROOT_DIR = os.getcwd()
    # initialize config
    config = Config()

    # initialize the training dataset
    dataset_train = DeepDriveDataset()
    dataset_train.initDB(
        "d:/bdd100k/detection_image_train.json",
        "D:/Workspace/College/Semester 8/Tugas Akhir/codeR-FCN/bdd100k/images/100k/train",
        70000)
    dataset_train.prepare()
    print(dataset_train.image_ids, "training dataset loaded")

    # Validation dataset
    dataset_val = DeepDriveDataset()
    dataset_val.initDB(
        "d:/bdd100k/detection_image_val.json",
        "D:/Workspace/College/Semester 8/Tugas Akhir/codeR-FCN/bdd100k/images/100k/val",
        10000)
    dataset_val.prepare()
    # set up the RFCN model for training
    model = RFCN_Model(mode="training",
                       model_dir=os.path.join(ROOT_DIR, "ModelData"))
    # init = tf.global_variables_initializer()
    try:
        model_path = model.find_last()[1]
        print(model_path)
        if model_path is not None:
            model.load_weights(model_path, by_name=True)
    except Exception as e:
        print(e)
        print("No checkpoint founded")

    # K.manual_variable_initialization(True)
    model.save("D:/weight.h5")
    # tensorflow.compat.v1.enable_resource_variables()
    # training begin
    # stage 1
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=2,
                layers='heads')
    model.save("D:/weight.h5")
    # Training - Stage 2
    # Finetune layers from ResNet stage 4 and up
    print("Fine tune Resnet stage 4 and up")
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=40,
                layers='4+')
    model.save("D:/weight.h5")
    # Training - Stage 3
    # Fine tune all layers
    print("Fine tune all layers")
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=80,
                layers='all')
    model.save("D:/weight.h5")
    # Training - Stage 4
    # Continue fine-tuning all layers
    print("Continue fine-tuning all layers")
    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=240,
                layers='all')
    model.save("D:/weight.h5")