Example 1
def TrainMaskRCNN():
    ROOT_DIR = "C:\\Users\\jxmr\\Desktop\\ProjectIII\\OCRDataset\\Segmentation\\Data"

    # Directory to save logs and trained model
    MODEL_DIR = os.path.join(ROOT_DIR, "logs")

    ## Local path to trained weights file
    #COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
    ## Download COCO trained weights from Releases if needed
    #if not os.path.exists(COCO_MODEL_PATH):
    #    utils.download_trained_weights(COCO_MODEL_PATH)

    config = SegmentationConfig()

    # Training dataset
    dataset_train = SegmentationDataset()
    dataset_train.load(501, 24889)
    dataset_train.prepare()

    # Validation dataset
    dataset_val = SegmentationDataset()
    dataset_val.load(1, 500)
    dataset_val.prepare()

    model = MaskRCNN(mode="training", config=config, model_dir=MODEL_DIR)

    #model.load_weights(COCO_MODEL_PATH, by_name=True, exclude=["mrcnn_class_logits", "mrcnn_bbox_fc", "mrcnn_bbox", "mrcnn_mask"])

    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=8,
                layers='all')
    model_path = os.path.join(MODEL_DIR, "mask_rcnn_segmentation.h5")
    model.keras_model.save_weights(model_path)
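
Example 1 references `SegmentationConfig` and `SegmentationDataset` without showing them. The sketch below is a rough guess at what those classes might look like, assuming the standard matterport Mask R-CNN `Config`/`Dataset` API; the class name "text", the interpretation of `load(first_id, last_id)` as an inclusive id range, and the image paths are illustrative assumptions, not the original author's code.

import os
from mrcnn.config import Config
from mrcnn import utils


class SegmentationConfig(Config):
    # Illustrative settings; the real configuration is not shown in the snippet.
    NAME = "segmentation"
    NUM_CLASSES = 1 + 1          # background + one foreground class (assumed)
    IMAGES_PER_GPU = 1
    STEPS_PER_EPOCH = 1000


class SegmentationDataset(utils.Dataset):
    def load(self, first_id, last_id):
        # Register the class and one image per id in the (assumed inclusive) range.
        self.add_class("segmentation", 1, "text")  # hypothetical class name
        for image_id in range(first_id, last_id + 1):
            self.add_image("segmentation", image_id=image_id,
                           path=os.path.join("images", "%d.png" % image_id))

    def load_mask(self, image_id):
        # Must return (masks of shape [H, W, instance_count], class_ids array);
        # the real implementation depends on the dataset's annotation format.
        raise NotImplementedError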
Example 2
def main():
    config = TronConfig()
    config.display()


    model = MaskRCNN(mode="training", config=config)

    # Load weights (note: the ResNet-101 weights could not be loaded here)
    # model_path = "./pretrained/resnet101_weights_tf.h5"

    # model.load_weights(model_path, by_name=True, exclude=[ "mrcnn_class_logits", "mrcnn_bbox_fc", "mrcnn_bbox", "mrcnn_mask"])
    # model_path = "./pretrained/mask_rcnn_coco.h5"

    model_path = "./pretrained/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5"
    print("Loading weights ", model_path)
    model.load_weights(model_path, by_name=True)


    # Image Augmentation
    # Right/Left flip 50% of the time
    augmentation = imgaug.augmenters.Fliplr(0.5)


    # Training - Stage 1
    print("Training network heads")
    model.train(learning_rate=config.LEARNING_RATE,
                epochs=2,
                layers='head',
                augmentation=augmentation)

    # Training - Stage 2
    # Finetune layers from ResNet stage 4 and up
    print("Fine tune Resnet stage 4 and up")
    model.train(learning_rate=config.LEARNING_RATE,
                epochs=2,
                layers='4+', # OOM (out-of-memory) risk at this stage
                augmentation=augmentation)

    # Training - Stage 3
    # Fine tune all layers
    print("Fine tune all layers")
    model.train(learning_rate=config.LEARNING_RATE / 10,
                epochs=2,
                layers='all',
                augmentation=augmentation)
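
Example 2 fine-tunes in three stages but passes `epochs=2` to every call. In the reference matterport implementation, `epochs` is the epoch index to train *up to* (training resumes from the model's current epoch), and the recognised `layers` keys include "heads", "3+", "4+", "5+", and "all". Assuming that convention, a staged schedule is usually written with increasing epoch targets, as in the sketch below; the epoch values are illustrative, and the calls mirror the snippet's own signature (which omits the dataset arguments of the reference API).

# Staged fine-tuning with cumulative epoch targets (illustrative values only).
schedule = [
    ("heads", config.LEARNING_RATE,      20),  # stage 1: network heads
    ("4+",    config.LEARNING_RATE,      60),  # stage 2: ResNet stage 4 and up
    ("all",   config.LEARNING_RATE / 10, 80),  # stage 3: all layers, lower LR
]
for layers, lr, target_epoch in schedule:
    print("Training layers=%s up to epoch %d" % (layers, target_epoch))
    model.train(learning_rate=lr,
                epochs=target_epoch,
                layers=layers,
                augmentation=augmentation)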
Example 3
            'learning_rate': config.LEARNING_RATE,
            'layers_trained': 'all'}

        subdir = os.path.dirname(model.log_dir)
        if not os.path.isdir(subdir):
            os.mkdir(subdir)

        if not os.path.isdir(model.log_dir):
            os.mkdir(model.log_dir)

        train_meta_file = model.log_dir + '_meta.json'
        with open(train_meta_file, 'w+') as f:
            f.write(json.dumps(training_meta))

        # Training all layers
        model.train(dataset_train, dataset_val,learning_rate=config.LEARNING_RATE, epochs=100,
                    layers='all', augmentation=augmentation_pipeline)

        # Training last layers
        # Finetune layers from ResNet stage 4 and up
        # model.train(dataset_train, dataset_val,
        #             learning_rate=config.LEARNING_RATE,
        #             epochs=120,
        #             layers='4+')

        # Training only heads
        # model.train(dataset_train, dataset_val,
        #             learning_rate=config.LEARNING_RATE / 10,
        #             epochs=160,
        #             layers='heads')

    elif args.command == "evaluate":
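
Example 3 passes an `augmentation_pipeline` object that the snippet never defines. A plausible definition, assuming the imgaug library already used in Example 2, is sketched below; the specific augmenters and probabilities are illustrative assumptions.

import imgaug.augmenters as iaa

# Hypothetical pipeline for the `augmentation_pipeline` variable referenced
# above; adjust the augmenters to the dataset at hand.
augmentation_pipeline = iaa.Sequential([
    iaa.Fliplr(0.5),                                   # horizontal flip half the time
    iaa.Sometimes(0.3, iaa.Affine(rotate=(-10, 10))),  # occasional small rotations
    iaa.Sometimes(0.3, iaa.GaussianBlur(sigma=(0.0, 1.0))),
])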
Example 4
    elif init_with == "last":
        # Load the last model you trained and continue training
        weights_path = model.find_last()[1]
        model.load_weights(weights_path, by_name=True)
    elif init_with == 'pretrained':
        weights_path = '../data/pretrained_model.h5'
        model.load_weights(weights_path, by_name=True)

    print('Loading weights from ', weights_path)

    # Train the model for 75 epochs in total, in three stages with decreasing learning rates

    model.train(dataset_train,
                dataset_val,
                learning_rate=1e-4,
                epochs=25,
                verbose=2,
                layers='all')

    model.train(dataset_train,
                dataset_val,
                learning_rate=1e-5,
                epochs=50,
                verbose=2,
                layers='all')

    model.train(dataset_train,
                dataset_val,
                learning_rate=1e-6,
                epochs=75,
                verbose=2,