Example 1
def get_model(config, model_path=None):
    """
    Loads and returns MaskRCNN model for a given config and weights.
    """

    model = MaskRCNN(mode="inference", config=config, model_dir='../')

    # Get path to saved weights
    # Either set a specific path or find last trained weights
    # model_path = os.path.join(ROOT_DIR, ".h5 file name here")
    if model_path is None:
        model_path = model.find_last()[1]
        try:
            # Renaming the file onto itself is a no-op used here as a quick
            # check that no other process has the weights file locked
            os.rename(model_path, model_path)
            print('Access on file ' + model_path + ' is available!')
            from shutil import copyfile
            dst = '../data/mask_rcnn_temp.h5'
            copyfile(model_path, dst)
            model_path = dst
        except OSError as e:
            print('Access-error on file "' + model_path + '"! \n' + str(e))

    # Load trained weights (fill in path to trained weights here)
    assert model_path != "", "Provide path to trained weights"
    print("Loading weights from ", model_path)

    model.load_weights(model_path, by_name=True)

    return model
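
A minimal usage sketch for the helper above, assuming the Matterport mrcnn package; the InferenceConfig values and the image file name are placeholders rather than part of the original example:

from mrcnn.config import Config
import skimage.io

class InferenceConfig(Config):
    NAME = "inference"
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    NUM_CLASSES = 1 + 80  # background + classes; must match the trained weights

# model_path=None makes get_model fall back to the last trained weights
model = get_model(InferenceConfig())
image = skimage.io.imread("example.jpg")  # hypothetical input image
results = model.detect([image], verbose=0)
print(results[0]["class_ids"], results[0]["scores"])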
Example 2
def TrainMaskRCNN():
    ROOT_DIR = "C:\\Users\\jxmr\\Desktop\\ProjectIII\\OCRDataset\\Segmentation\\Data"

    # Directory to save logs and trained model
    MODEL_DIR = os.path.join(ROOT_DIR, "logs")

    ## Local path to trained weights file
    #COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
    ## Download COCO trained weights from Releases if needed
    #if not os.path.exists(COCO_MODEL_PATH):
    #    utils.download_trained_weights(COCO_MODEL_PATH)

    config = SegmentationConfig()

    # Training dataset
    dataset_train = SegmentationDataset()
    dataset_train.load(501, 24889)
    dataset_train.prepare()

    # Validation dataset
    dataset_val = SegmentationDataset()
    dataset_val.load(1, 500)
    dataset_val.prepare()

    model = MaskRCNN(mode="training", config=config, model_dir=MODEL_DIR)

    #model.load_weights(COCO_MODEL_PATH, by_name=True, exclude=["mrcnn_class_logits", "mrcnn_bbox_fc", "mrcnn_bbox", "mrcnn_mask"])

    model.train(dataset_train,
                dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=8,
                layers='all')
    model_path = os.path.join(MODEL_DIR, "mask_rcnn_segmentation.h5")
    model.keras_model.save_weights(model_path)
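
SegmentationConfig and SegmentationDataset are defined elsewhere in that project. For orientation only, a hypothetical config along the lines the training call above assumes (Matterport Config base class; the real values may differ):

from mrcnn.config import Config

class SegmentationConfig(Config):
    NAME = "segmentation"
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    NUM_CLASSES = 1 + 1      # background + one object class (assumption)
    STEPS_PER_EPOCH = 500    # assumption
    LEARNING_RATE = 0.001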
Example 3
def main():
    config = TronConfig()
    config.display()


    model = MaskRCNN(mode="training", config=config)

    # Load weights (the ResNet-101 weights cannot be loaded here)
    # model_path = "./pretrained/resnet101_weights_tf.h5"

    # model.load_weights(model_path, by_name=True, exclude=[ "mrcnn_class_logits", "mrcnn_bbox_fc", "mrcnn_bbox", "mrcnn_mask"])
    # model_path = "./pretrained/mask_rcnn_coco.h5"

    model_path = "./pretrained/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5"
    print("Loading weights ", model_path)
    model.load_weights(model_path, by_name=True)


    # Image Augmentation
    # Right/Left flip 50% of the time
    augmentation = imgaug.augmenters.Fliplr(0.5)


    # Training - Stage 1
    print("Training network heads")
    model.train(learning_rate=config.LEARNING_RATE,
                epochs=2,
                layers='head',
                augmentation=augmentation)

    # Training - Stage 2
    # Finetune layers from ResNet stage 4 and up
    print("Fine tune Resnet stage 4 and up")
    model.train(learning_rate=config.LEARNING_RATE,
                epochs=2,
                layers='4+',  # '4+' can run out of memory (OOM) on smaller GPUs
                augmentation=augmentation)

    # Training - Stage 3
    # Fine tune all layers
    print("Fine tune all layers")
    model.train(learning_rate=config.LEARNING_RATE / 10,
                epochs=2,
                layers='all',
                augmentation=augmentation)
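
The script only flips images horizontally. If more variety is wanted, imgaug augmenters can be composed; an illustrative combination (not part of the original script):

import imgaug.augmenters as iaa

# Apply one randomly chosen augmenter to 50% of the images
augmentation = iaa.Sometimes(0.5, iaa.OneOf([
    iaa.Fliplr(1.0),                     # horizontal flip
    iaa.Affine(rotate=(-10, 10)),        # small rotation
    iaa.GaussianBlur(sigma=(0.0, 1.0)),  # mild blur
]))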
Example 4
def load_model(MODEL_DIR, config):
    
    # Recreate the model in inference mode
    model = MaskRCNN(mode="inference", 
                              config=config,
                              model_dir=MODEL_DIR)

    # Get path to saved weights
    # Either set a specific path or find last trained weights

    # The weights path must point to a trained .h5 file, not just the log directory
    # model_path = os.path.join(MODEL_DIR, "mask_rcnn_weights.h5")  # hypothetical file name
    model_path = model.find_last()

    # Load trained weights
    print("Loading weights from ", model_path)
    model.load_weights(model_path, by_name=True)
    return model
Example 5
                {"input_image": input_image, 'input_image_meta': input_image_meta, 'input_anchors': input_anchors},
                {"mrcnn_detection/Reshape_1": output_detection, 'mrcnn_mask/Reshape_1': output_mask})

        builder.add_meta_graph_and_variables(sess, [tag_constants.SERVING],
                                             signature_def_map=sigs)

    builder.save()
    print("*" * 80)
    print("FINISH CONVERTING FROZEN PB TO SERVING READY")
    print("PATH:", PATH_TO_SAVE_TENSORFLOW_SERVING_MODEL)
    print("*" * 80)


# Load Mask RCNN config
# you can also load your own config in here.
# config = your_custom_config_class
config = get_config()

# LOAD MODEL
model = MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=config)
model.load_weights(H5_WEIGHT_PATH, by_name=True)

# Converting keras model to PB frozen graph
freeze_model(model.keras_model, FROZEN_NAME)

# Now convert frozen graph to Tensorflow Serving Ready
make_serving_ready(os.path.join(PATH_TO_SAVE_FROZEN_PB, FROZEN_NAME),
                   PATH_TO_SAVE_TENSORFLOW_SERVING_MODEL, VERSION_NUMBER)

print("COMPLETED")
Example 6
    args = parser.parse_args()
    print("Model: ", args.model)
    print("Dataset: ", args.dataset)
    print("Year: ", args.year)
    print("Logs: ", args.logs)
    print("Target: ", args.target)
    print("Show Perturbation: ", args.show_perturbation)
    print("Use Mask: ", args.use_mask)
    # print("Auto Download: ", args.download)

    config = CocoConfig()
    config.display()

    # Create model
    model = MaskRCNN(config=config, model_dir=args.logs)
    if config.GPU_COUNT:
        model = model.cuda()

    # Select weights file to load
    model_path = COCO_MODEL_PATH

    # Load weights
    print("Loading weights ", model_path)
    model.load_weights(model_path)

    dataset_train = CocoDataset()
    #dataset_train.load_coco(args.dataset, "minival", year=args.year, auto_download=args.download)  # Uncomment to get all coco images
    if args.target is not None and args.target != "":
        dataset_train.load_coco(args.dataset,
                                "adversarial_attack_target_" + args.target,
Example 7
            NUM_CLASSES = nr_classes
            LEARNING_RATE = args.lrate
        config = TacoTrainConfig()
    else:
        class TacoTestConfig(Config):
            NAME = "taco"
            GPU_COUNT = 1
            IMAGES_PER_GPU = 1
            DETECTION_MIN_CONFIDENCE = 0.9
            NUM_CLASSES = nr_classes
        config = TacoTestConfig()
    config.display()

    # Create model
    if args.command == "train":
        model = MaskRCNN(mode="training", config=config, model_dir=DEFAULT_LOGS_DIR)
    else:
        model = MaskRCNN(mode="inference", config=config, model_dir=DEFAULT_LOGS_DIR)

    # Select weights file to load
    if args.model.lower() == "coco":
        model_path = COCO_MODEL_PATH
        # Download weights file
        if not os.path.exists(model_path):
            utils.download_trained_weights(model_path)
    elif args.model.lower() == "last":
        # Find last trained weights
        model_path = model.find_last()[1]
    elif args.model.lower() == "imagenet":
        # Start from ImageNet trained weights
        model_path = model.get_imagenet_weights()
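
The snippet ends before the weights are actually loaded. When starting from COCO weights with a different NUM_CLASSES, the usual Matterport pattern is to exclude the class-specific head layers; a sketch of that common idiom (not code from this example):

if args.model.lower() == "coco":
    # Skip layers whose shapes depend on the number of classes
    model.load_weights(model_path, by_name=True, exclude=[
        "mrcnn_class_logits", "mrcnn_bbox_fc", "mrcnn_bbox", "mrcnn_mask"])
else:
    model.load_weights(model_path, by_name=True)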
Example 8
    shutil.rmtree(tmp_dir, ignore_errors=True)

if __name__ == '__main__':
    import argparse

    # Parse command line arguments
    parser = argparse.ArgumentParser(description='Run Mask R-CNN trained on TACO on a video file')
    parser.add_argument('--model', required=True, help="Path to weights .h5 file")
    parser.add_argument('--input', required=True, help='Path to input video/image')
    parser.add_argument('--output', default="annotated.mp4", help='output path')

    args = parser.parse_args()

    class_names = ['BG', 'Bottle', 'Bottle cap', 'Can', 'Cigarette', 'Cup', 'Lid', 'Other', 'Plastic bag + wrapper', 'Pop tab', 'Straw']

    # Configurations
    class TacoTestConfig(Config):
        NAME = "taco"
        GPU_COUNT = 1
        IMAGES_PER_GPU = 1
        DETECTION_MIN_CONFIDENCE = 0.1  # detection scores lie in [0, 1]
        NUM_CLASSES = len(class_names)
        USE_OBJECT_ZOOM = False
    config = TacoTestConfig()
    #config.display()

    model = MaskRCNN(mode="inference", config=config, model_dir="logs/")
    model.load_weights(args.model, args.model, by_name=True)

    annotate_video(model, class_names, args.input, args.output)
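
annotate_video itself is not shown here. Purely as an illustration of how such a helper can drive the model, a minimal frame-by-frame detection loop with OpenCV (hypothetical code; drawing the masks and writing the output video are omitted):

import cv2

def detect_frames(model, video_path):
    capture = cv2.VideoCapture(video_path)
    while True:
        ok, frame = capture.read()
        if not ok:
            break
        rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)  # Mask R-CNN expects RGB
        result = model.detect([rgb], verbose=0)[0]
        print("detections in frame:", len(result["class_ids"]))
    capture.release()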
Example 9
    dataset_train = KaggleDataset()
    dataset_train.load_shapes(train_list, train_path)
    dataset_train.prepare()

    # initialize validation dataset
    dataset_val = KaggleDataset()
    dataset_val.load_shapes(val_list, train_path)
    dataset_val.prepare()

    # Create model configuration in training mode
    config = KaggleBowlConfig()
    config.STEPS_PER_EPOCH = len(train_list) // config.BATCH_SIZE
    config.VALIDATION_STEPS = len(val_list) // config.BATCH_SIZE
    config.display()

    model = MaskRCNN(mode="training", config=config, model_dir=MODEL_DIR)

    # Model weights to start training with
    init_with = "imagenet"  # imagenet, last, or some pretrained model

    if init_with == "imagenet":
        weights_path = model.get_imagenet_weights()
        model.load_weights(weights_path, by_name=True)

    elif init_with == "last":
        # Load the last model you trained and continue training
        weights_path = model.find_last()[1]
        model.load_weights(weights_path, by_name=True)
    elif init_with == 'pretrained':
        weights_path = '../data/pretrained_model.h5'
        model.load_weights(weights_path, by_name=True)
Example 10
#                         md5_hash='a268eb855778b3df3c7506639542a6af')
# return weights_path

# Directory of images to run detection on
mode = 'inference'
count_class = 1 + 80
count_gpu = 1
count_image_per_gpu = 1
json_config_coco = ConfigMaskrcnn.create_json(count_class, count_gpu,
                                              count_image_per_gpu)
json_config_coco['BACKBONE'] = "resnet101"  # TODO: redundant; supported values are resnet50 and resnet101. Other backbones could be added later.

# Create model object in inference mode.
config_coco = ConfigMaskrcnn(json_config_coco)
manager_model = MaskRCNN(mode, config_coco)

# Load weights trained on MS-COCO
manager_model.load_weights(path_file_weight, by_name=True)

input_shape = manager_model.keras_model.layers[0].input_shape
format_input = struct_format_input.create([input_shape[1], input_shape[2]])
output_shape = manager_model.keras_model.layers[-1].output_shape
print(manager_model.keras_model.layers[-1].output_shape)
format_output = struct_format_output.create('maskrcnn', output_shape[1])

# COCO Class names #TODO create labelling from this
# Index of the class in the list is its ID. For example, to get ID of
# the teddy bear class, use: class_names.index('teddy bear')

list_name_class = [
Example 11
        config = WallyConfig()
    else:

        class InferenceConfig(WallyConfig):
            # Set batch size to 1 since we'll be running inference on
            # one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
            GPU_COUNT = 1
            IMAGES_PER_GPU = 1

        config = InferenceConfig()
    config.display()

    # Create model
    if args.command == "train":
        model = MaskRCNN(mode="training",
                         config=config,
                         model_dir=config.MODEL_DIR)
    else:
        model = MaskRCNN(mode="inference",
                         config=config,
                         model_dir=config.MODEL_DIR)

    # Select weights file to load
    if args.weights.lower() == "coco":
        weights_path = config.COCO_WEIGHTS_PATH
        # Download weights file
        if not os.path.exists(weights_path):
            utils.download_trained_weights(weights_path)
    elif args.weights.lower() == "last":
        # Find last trained weights
        weights_path = model.find_last()[1]
Example 12

def mold_image_tensor(images, config):
    """Takes an image tensor and subtracts the mean pixel; counterpart of unmold_image_tensor below."""
    return images - torch.from_numpy(config.MEAN_PIXEL).float().cuda(
    ).unsqueeze(1).unsqueeze(2).unsqueeze(0).expand_as(images)


def unmold_image_tensor(normalized_images, config):
    """Takes a image normalized with mold() and returns the original."""
    return normalized_images + torch.from_numpy(
        config.MEAN_PIXEL).float().cuda().unsqueeze(1).unsqueeze(2).unsqueeze(
            0).expand_as(normalized_images)


if __name__ == '__main__':
    config = CocoConfig()
    config.display()

    # Create model
    model = MaskRCNN(config=config, model_dir=DEFAULT_LOGS_DIR)
    if config.GPU_COUNT:
        model = model.cuda()

    # Select weights file to load
    model_path = COCO_MODEL_PATH

    # Load weights
    print("Loading weights ", model_path)
    model.load_weights(model_path)

    dataset_path = './data/video'

    target = True

    dataset_train = CocoDataset()
Example 13
class InferenceConfig(TronConfig):
    # Set batch size to 1 since we'll be running inference on
    # one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    # NUM_CLASSES = 1 + 8
    # for COCO it is 1 + 80
    NUM_CLASSES = 1 + 80


config = InferenceConfig()
config.display()

# Create model object in inference mode.
model = MaskRCNN(mode="inference", config=config)

# class_names = ['background','lc','ls','fsp','crb',
#                   'lws','lwd','lys','lyd']
class_names = [
    'BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
    'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',
    'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
    'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag',
    'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball', 'kite',
    'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
    'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon',
    'bowl', 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot',
    'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', 'potted plant',
    'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
    'keyboard', 'cell phone', 'microwave', 'oven', 'toaster', 'sink',
Example 14
def RunMaskRCNN():

    pytesseract.pytesseract.tesseract_cmd = r'C:\\Users\\jxmr\\Desktop\\ProjectIII\\wrapper\\tesseract.bat'

    ROOT_DIR = "C:\\Users\\jxmr\\Desktop\\ProjectIII\\OCRDataset\\Segmentation\\Data"

    # Directory to save logs and trained model
    MODEL_DIR = os.path.join(ROOT_DIR, "logs")

    config = SegmentationConfig()

    # Recreate the model in inference mode
    model = MaskRCNN(mode="inference", config=config, model_dir=MODEL_DIR)

    # Get path to saved weights
    # Either set a specific path or find last trained weights
    model_path = os.path.join(MODEL_DIR, "mask_rcnn_shapes.h5")
    #model_path = model.find_last()

    # Load trained weights
    print("Loading weights from ", model_path)
    model.load_weights(model_path, by_name=True)

    nnX = load_model(
        "C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\kerasRectifierX.h5",
        custom_objects={'R_squared': R_squared})
    nnY = load_model(
        "C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\kerasRectifierY.h5",
        custom_objects={'R_squared': R_squared})
    nnZ = load_model(
        "C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\kerasRectifierZ.h5",
        custom_objects={'R_squared': R_squared})
    nnXYZ = load_model(
        "C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\kerasRectifierXYZ.h5",
        custom_objects={'R_squared': R_squared})
    nnHS = load_model(
        "C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\kerasRectifierHorizontalShear.h5",
        custom_objects={'R_squared': R_squared})
    nnVS = load_model(
        "C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\kerasRectifierVerticalShear.h5",
        custom_objects={'R_squared': R_squared})

    image = cv2.imread("C:\\Users\\jxmr\\Downloads\\IMG_20191207_020339.jpg")

    file1 = open("C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\output1.txt",
                 "w")
    print(pytesseract.image_to_string(image), file=file1)
    file1.flush()
    file1.close()

    #plt.subplot(1, 2, 1)
    plt.imshow(image)
    plt.show()

    result = RectifyImage(image, model, nnX, nnZ, nnY, nnXYZ)

    #plt.subplot(1, 2, 2)
    plt.imshow(result)
    plt.show()

    file1 = open("C:\\Users\\jxmr\\Desktop\\ProjectIII\\Data2\\output2.txt",
                 "w")
    print(pytesseract.image_to_string(result), file=file1)
    file1.flush()
    file1.close()
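
R_squared is passed to load_model as a custom object but is not defined in this snippet. A common Keras-backend definition of the coefficient of determination looks like the following; this is an assumption, and the project's own implementation may differ:

from keras import backend as K

def R_squared(y_true, y_pred):
    ss_res = K.sum(K.square(y_true - y_pred))
    ss_tot = K.sum(K.square(y_true - K.mean(y_true)))
    return 1.0 - ss_res / (ss_tot + K.epsilon())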
Example 15
ROOT_DIR = os.getcwd()

# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")

DATASET_DIR = os.path.join(ROOT_DIR, "data")
TARGET = "class"
SHOW_PERTURBATION = True
USE_MASK = False

config = CocoConfig()
config.display()

# Create model
model = MaskRCNN(config=config, model_dir="logs/")
if config.GPU_COUNT:
    model = model.cuda()

# Select weights file to load
model_path = model.find_last()[1]

# Load weights
print("Loading weights ", model_path)
model.load_weights(model_path)

dataset_train = CocoDataset()
dataset_train.load_coco(DATASET_DIR,
                        "adversarial_attack_target_" + TARGET,
                        year=2014,
                        auto_download=False)
Example 16
config = TacoTestConfig()

app = flask.Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024


@app.after_request
def cors(response):
    response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['Access-Control-Allow-Methods'] = '*'
    response.headers['Access-Control-Allow-Headers'] = 'x-requested-with,content-type'
    return response


model = MaskRCNN('inference', config, 'models/logs')
model.load_weights(WEIGHT_PATH, None, by_name=True)
model.keras_model._make_predict_function()
class_names = []
with open(MAP_PATH, 'r') as f:
    reader = csv.reader(f)
    class_names += [row[1] for row in reader]
class_names_set = list(set(class_names))
class_names_set.sort(key=class_names.index)


def random_colors(N, bright=True):
    """
    Generate random colors.
    To get visually distinct colors, generate them in HSV space then
    convert to RGB.