Code Example #1
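All five examples appear to come from the same building-damage project and lean on its module-level helpers. A plausible shared preamble, with every module name inferred from usage below (treat these as assumptions, not the project's actual layout):

import os
import sys

import numpy as np
import tqdm
from tensorflow import keras

import train    # model construction / weight loading (assumed local module)
import flow     # Dataflow sequences and file listings (assumed local module)
import infer    # prediction post-processing (assumed local module)
import damage   # damage-classifier helpers (assumed local module)
import score    # custom Keras metrics (assumed local module)
import settings as S    # hypothetical name for the settings module aliased as S
# write_solution, randomize_damage, show, predict_and_show, train_stepper,
# save_model, and logger are assumed to be defined elsewhere in the same file.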
def damage_by_segmentation(path):
    """
    Generate solution .png files, using a single multiclass segmentation
    model to do so.
    """
    model = train.build_model(classes=6, damage=True)
    model = train.load_weights(model,
                               "damage-motokimura-mobilenetv2-best.hdf5")
    df = flow.Dataflow(files=flow.get_test_files(),
                       transform=False,
                       batch_size=1,
                       buildings_only=False,
                       shuffle=False,
                       return_postmask=False,
                       return_stacked=True,
                       return_average=False)
    pbar = tqdm.tqdm(total=len(df))

    for image, filename in df:
        # derive the solution file name from the input file name
        filename = os.path.basename(filename)
        filename = filename.replace("pre", "localization").replace(
            ".png", "_prediction.png")

        # localization (segmentation)
        pred = model.predict([image])
        mask = infer.convert_prediction(pred)
        write_solution(names=[filename], images=[mask], path=path)

        filename = filename.replace("localization", "damage")
        write_solution(names=[filename], images=[mask], path=path)

        pbar.update(1)
    pbar.close()
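infer.convert_prediction is not part of this listing. A minimal sketch of what such a helper typically does, assuming the model emits per-pixel class scores with channels last (the real function may differ):

def convert_prediction_sketch(pred):
    # (1, H, W, classes) softmax scores -> (H, W) uint8 class-index mask
    return np.argmax(pred[0], axis=-1).astype(np.uint8)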
Code Example #2
def damage_random(path):
    """
    Generate solution .png files using random damage.
    """
    model = train.build_model(train=False)
    model = train.load_weights(model, S.MODELSTRING_BEST)
    df = flow.Dataflow(files=flow.get_test_files(),
                       transform=False,
                       batch_size=1,
                       buildings_only=False,
                       shuffle=False,
                       return_postmask=False,
                       return_stacked=True,
                       return_average=False)
    pbar = tqdm.tqdm(total=len(df))

    for image, filename in df:
        filename = os.path.basename(filename)
        filename = filename.replace("pre", "localization").replace(
            ".png", "_prediction.png")

        # localization (segmentation)
        pred = model.predict([image])
        mask = infer.convert_prediction(pred)
        write_solution(names=[filename], images=[mask], path=path)

        # replace the predicted damage classes with random ones (baseline)
        mask = randomize_damage(mask)
        filename = filename.replace("localization", "damage")
        write_solution(names=[filename], images=[mask], path=path)

        pbar.update(1)
    pbar.close()
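randomize_damage is likewise external to this listing. A minimal sketch, under the assumption that 0 is background and the damage classes are the integers 1 through 4:

def randomize_damage_sketch(mask):
    # assign a uniformly random damage class (1..4) to every building pixel
    out = mask.copy()
    building = mask > 0
    out[building] = np.random.randint(1, 5, size=int(building.sum()))
    return out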
Code Example #3
def damage_by_building_classification(path):
    """
    Generate solution .png files by extracting individual buildings as
    contiguous regions of the segmentation model's predicted mask, then
    classifying each building's damage from its pre-/post-disaster crops.
    """
    # load the localization (segmentation) model
    S.BATCH_SIZE = 1
    model = train.build_model(architecture=S.ARCHITECTURE, train=True)
    model = train.load_weights(model, S.MODELSTRING_BEST)

    # load the damage classification model
    dmg_model = damage.build_model()
    dmg_model = damage.load_weights(dmg_model, S.DMG_MODELSTRING_BEST)

    # get a dataflow for the test files
    df = flow.Dataflow(files=flow.get_test_files(),
                       transform=False,
                       shuffle=False,
                       buildings_only=False,
                       batch_size=1,
                       return_stacked=True)
    pbar = tqdm.tqdm(total=len(df))
    # `stacked` holds the pre- and post-disaster images concatenated
    # along the channel axis (channels 0-2 = pre, 3-5 = post)
    for stacked, filename in df:
        filename = os.path.basename(filename)
        filename = filename.replace("pre", "localization").replace(
            ".png", "_prediction.png")

        # localization (segmentation)
        pred = model.predict(stacked)
        mask = infer.convert_prediction(pred)
        write_solution(names=[filename], images=[mask], path=path)

        # damage classification
        filename = filename.replace("localization", "damage")
        pre, post = stacked[..., :3], stacked[..., 3:]
        boxes, coords = flow.Building.get_all_in(pre, post, mask)
        if len(boxes) > 0:
            labels = dmg_model.predict(boxes)
            # paint each building's bounding box with its predicted damage
            # class; +1 offsets argmax (0..N-1) past the background value 0
            for k, c in enumerate(coords):
                x, y, w, h = c
                mask[y:y + h, x:x + w] = np.argmax(labels[k]) + 1

        write_solution(names=[filename], images=[mask], path=path)
        pbar.update(1)
    pbar.close()
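flow.Building.get_all_in is not reproduced here either. The technique it implements is connected-component extraction over the localization mask, followed by cropping matching pre/post patches for the classifier. A hedged sketch using scipy.ndimage, with a hypothetical fixed classifier input size (the real implementation may differ):

import scipy.ndimage
import tensorflow as tf

def get_buildings_sketch(pre, post, mask, size=(64, 64)):
    # label contiguous building regions in the (H, W) mask
    labeled, _ = scipy.ndimage.label(mask > 0)
    boxes, coords = [], []
    for sl in scipy.ndimage.find_objects(labeled):
        ys, xs = sl
        x, y = xs.start, ys.start
        w, h = xs.stop - x, ys.stop - y
        # stack the pre/post crops so the classifier sees both views
        crop = np.concatenate([pre[0, y:y + h, x:x + w],
                               post[0, y:y + h, x:x + w]], axis=-1)
        boxes.append(tf.image.resize(crop, size).numpy())
        coords.append((x, y, w, h))
    return np.array(boxes), coords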
Code Example #4
def main(predict: ("Do prediction", "flag", "p"),
         image: ("Show this specific image", "option", "i") = ""):
    """
    Show validation images, optionally with model predictions.
    """
    df = flow.Dataflow(files=flow.get_validation_files(),
                       shuffle=True,
                       batch_size=1,
                       buildings_only=True,
                       return_stacked=True,
                       transform=0.5,
                       return_average=False,
                       return_postmask=True)
    if image != "":
        for i in range(len(df.samples)):
            if image in df.samples[i][0].img_name or image in df.samples[i][1].img_name:
                df.samples = [df.samples[i]]
                show(df)

    if predict:
        predict_and_show(df)
    else:
        show(df)
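The three-element annotations on main's parameters follow plac's (help, kind, abbreviation) convention, so these entry points are presumably dispatched with plac.call. A typical wrapper (the script name is a guess) would be:

if __name__ == "__main__":
    import plac
    plac.call(main)    # e.g.  python show.py -p  or  python show.py -i <name>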
Code Example #5
def main(restore: ("Restore from checkpoint", "flag", "r"),
         damage: ("Train a damage classifier (default is localization)",
                  "flag", "d"),
         deeplab: ("Build and train a DeeplabV3+ model", "flag", "D"),
         motokimura: ("Build and train a Motokimura-designed Unet", "flag",
                      "M"),
         verbose: ("Keras verbosity level", "option", "v", int) = 1,
         epochs: ("Number of epochs", "option", "e", int) = 50,
         initial_epoch: ("Initial epoch to continue from", "option", "i",
                         int) = 1,
         optimizer: ("Keras optimizer to use", "option", "o", str) = 'RMSprop',
         loss='categorical_crossentropy'):
    """
    Train a model.
    """
    if deeplab:
        logger.info("Building DeeplabV3+ model.")
        model = build_deeplab_model(classes=S.N_CLASSES,
                                    damage=damage,
                                    train=True)
        S.ARCHITECTURE = "deeplab-xception"
    elif motokimura:
        logger.info("Building MotokimuraUnet model.")
        model = build_model(classes=S.N_CLASSES, damage=damage, train=True)
        S.ARCHITECTURE = "motokimura"
    else:
        logger.error("Use -M (motokimura) or -D (deeplab) parameter.")
        sys.exit(-1)

    S.DAMAGE = bool(damage)
    save_path = S.MODELSTRING = f"{S.ARCHITECTURE}.hdf5"
    S.DMG_MODELSTRING = f"damage-{save_path}"
    if restore:
        load_weights(model, save_path)

    metrics = [
        'accuracy', score.num_correct, score.recall, score.tensor_f1_score
    ]

    callbacks = [
        keras.callbacks.ModelCheckpoint(
            save_path.replace(".hdf5", "-best.hdf5"),
            save_weights_only=True,
            save_best_only=True),
    ]

    model.compile(optimizer=optimizer, loss=loss, metrics=metrics)

    flowcall = flow.DamagedDataflow if damage else flow.Dataflow
    train_seq = flowcall(files=flow.get_training_files(),
                         batch_size=S.BATCH_SIZE,
                         transform=0.3,
                         shuffle=True,
                         buildings_only=True,
                         return_postmask=damage,
                         return_stacked=True,
                         return_post_only=False,
                         return_average=False)
    val_seq = flow.Dataflow(files=flow.get_validation_files(),
                            batch_size=S.BATCH_SIZE,
                            buildings_only=True,
                            shuffle=True,
                            return_postmask=damage,
                            return_stacked=True,
                            return_post_only=False,
                            return_average=False)

    logger.info(
        "Training and saving best weights after each epoch (CTRL+C to interrupt)."
    )
    train_stepper(model, train_seq, verbose, epochs, callbacks, save_path,
                  val_seq, initial_epoch)
    save_model(model, save_path)
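train_stepper is not shown in this listing. Given the arguments it receives and the CTRL+C note in the log message, a minimal sketch consistent with the call above (the real function may do more, e.g. per-epoch checkpointing):

def train_stepper_sketch(model, train_seq, verbose, epochs, callbacks,
                         save_path, val_seq, initial_epoch):
    try:
        # Keras accepts a Sequence directly for both training and validation
        model.fit(train_seq,
                  validation_data=val_seq,
                  epochs=epochs,
                  initial_epoch=initial_epoch,
                  verbose=verbose,
                  callbacks=callbacks)
    except KeyboardInterrupt:
        # keep the latest weights if training is interrupted
        model.save_weights(save_path)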