def get_loss(config, args):
    model_config = config["model"]
    training_config = config["training"]
    if model_config["name"] == "ssd_vgg16":
        return SSD_LOSS(
            alpha=training_config["alpha"],
            min_negative_boxes=training_config["min_negative_boxes"],
            negative_boxes_ratio=training_config["negative_boxes_ratio"])
    elif model_config["name"] == "ssd_mobilenetv1":
        return SSD_LOSS(
            alpha=training_config["alpha"],
            min_negative_boxes=training_config["min_negative_boxes"],
            negative_boxes_ratio=training_config["negative_boxes_ratio"])
    elif model_config["name"] == "ssd_mobilenetv2":
        return SSD_LOSS(
            alpha=training_config["alpha"],
            min_negative_boxes=training_config["min_negative_boxes"],
            negative_boxes_ratio=training_config["negative_boxes_ratio"])
    elif model_config["name"] == "tbpp_vgg16":
        return TBPP_LOSS(
            alpha=training_config["alpha"],
            min_negative_boxes=training_config["min_negative_boxes"],
            negative_boxes_ratio=training_config["negative_boxes_ratio"])

    elif model_config["name"] == "qssd_vgg16":
        return QSSD_LOSS(
            alpha=training_config["alpha"],
            min_negative_boxes=training_config["min_negative_boxes"],
            negative_boxes_ratio=training_config["negative_boxes_ratio"])

    elif model_config["name"] == "qssd_mobilenetv2":
        return QSSD_LOSS(
            alpha=training_config["alpha"],
            min_negative_boxes=training_config["min_negative_boxes"],
            negative_boxes_ratio=training_config["negative_boxes_ratio"])
    else:
        print(
            f"model with name {model_config['name']} has not been implemented yet"
        )
        exit()
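
For reference, here is a minimal sketch of how get_loss might be called, assuming a config dict that carries only the keys the function actually reads; the args parameter is unused by get_loss, so None is passed, and the hyper-parameter values shown are placeholders rather than values taken from the project:

example_config = {
    "model": {"name": "ssd_vgg16"},
    "training": {
        "alpha": 1.0,               # assumed localization/confidence weighting
        "min_negative_boxes": 0,    # assumed floor for hard negative mining
        "negative_boxes_ratio": 3,  # assumed negatives-to-positives ratio
    },
}

loss = get_loss(example_config, args=None)
# model.compile(optimizer=optimizer, loss=loss.compute)  # as done in the training examples below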
Example #2
def train_ssd_mobilenetv2(config, args):
    """"""
    assert args.label_maps is not None, "please specify a label maps file for this model"
    assert os.path.exists(args.label_maps), "label_maps file does not exist"
    with open(args.label_maps, "r") as file:
        label_maps = [line.strip("\n") for line in file.readlines()]

    assert args.training_split is not None, "please specify a training split file for this model"
    assert os.path.exists(
        args.training_split), "training_split file does not exist"

    training_samples = data_utils.get_samples_from_split(
        split_file=args.training_split,
        images_dir=args.images_dir,
        labels_dir=args.labels_dir)
    assert args.batch_size <= len(
        training_samples
    ), "batch_size must be less than or equal to len(training_samples)"

    training_config = config["training"]
    model = SSD_MOBILENETV2(config=config)
    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])
    optimizer = SGD(lr=args.learning_rate,
                    momentum=0.9,
                    decay=0.0005,
                    nesterov=False)
    generator = SSD_DATA_GENERATOR(
        samples=training_samples,
        label_maps=label_maps,
        config=config,
        shuffle=args.shuffle,
        batch_size=args.batch_size,
        augment=args.augment,
        process_input_fn=mobilenet_v2.preprocess_input)
    model.compile(optimizer=optimizer, loss=loss.compute)
    return model, generator, optimizer, loss, training_samples
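
train_ssd_mobilenetv2 only builds and compiles the model, so the caller is expected to run the training loop itself. A minimal sketch, assuming the returned generator is handed straight to Keras fit in the same way the later examples do:

model, generator, optimizer, loss, training_samples = train_ssd_mobilenetv2(config, args)
model.fit(x=generator, epochs=args.epochs)  # args.epochs is assumed, mirroring the later examples
model.save_weights(os.path.join(args.output_dir, "model.h5"))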
Example #3

with open(args.label_maps, "r") as file:
    label_maps = [line.strip("\n") for line in file.readlines()]

if not os.path.exists(args.output_dir):
    os.makedirs(args.output_dir)

with open(args.config, "r") as config_file:
    config = json.load(config_file)

model_config = config["model"]
training_config = config["training"]

if model_config["name"] == "ssd_vgg16":
    process_input_fn = vgg16.preprocess_input
    model = SSD_VGG16(config=config, label_maps=label_maps)
    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])
    optimizer = SGD(lr=args.learning_rate,
                    momentum=0.9,
                    decay=0.0005,
                    nesterov=False)
    model.compile(optimizer=optimizer, loss=loss.compute)
elif model_config["name"] == "ssd_mobilenetv1":
    process_input_fn = mobilenet.preprocess_input
    model = SSD_MOBILENET(config=config, label_maps=label_maps)
    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])
    optimizer = SGD(lr=args.learning_rate,
                    momentum=0.9,
                    decay=0.0005,
                    nesterov=False)
    model.compile(optimizer=optimizer, loss=loss.compute)
Example #4
def ssd_mobilenetv1(config, args):
    """Train an SSD MobileNetV1 model from the given config and CLI args."""
    training_config = config["training"]
    with open(args.label_maps, "r") as label_map_file:
        label_maps = [i.strip("\n") for i in label_map_file.readlines()]

    training_samples = data_utils.get_samples_from_split(
        split_file=args.training_split,
        images_dir=args.images_dir,
        labels_dir=args.labels_dir)

    if args.validation_split is not None:
        validation_samples = data_utils.get_samples_from_split(
            split_file=args.validation_split,
            images_dir=args.images_dir,
            labels_dir=args.labels_dir)

    training_data_generator = SSD_DATA_GENERATOR(
        samples=training_samples,
        config=config,
        label_maps=label_maps,
        shuffle=args.shuffle,
        batch_size=args.batch_size,
        augment=args.augment,
        process_input_fn=preprocess_input)

    if args.validation_split is not None:
        print("-- validation split specified")
        validation_data_generator = SSD_DATA_GENERATOR(
            samples=validation_samples,
            config=config,
            label_maps=label_maps,
            shuffle=args.shuffle,
            batch_size=args.batch_size,
            augment=False,
            process_input_fn=preprocess_input)

    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])

    model = SSD_MOBILENET(config=config,
                          label_maps=label_maps,
                          is_training=True)

    optimizer = SGD(lr=args.learning_rate,
                    momentum=0.9,
                    decay=0.0005,
                    nesterov=False)

    model.compile(optimizer=optimizer, loss=loss.compute)

    if args.checkpoint is not None:
        assert os.path.exists(args.checkpoint), "checkpoint does not exist"
        model.load_weights(args.checkpoint, by_name=True)

    model.fit(
        x=training_data_generator,
        validation_data=validation_data_generator
        if args.validation_split is not None else None,
        batch_size=args.batch_size,
        validation_batch_size=args.batch_size,
        epochs=args.epochs,
        callbacks=[
            ModelCheckpoint(filepath=os.path.join(
                args.output_dir, "cp_{epoch:02d}_loss-{loss:.2f}.h5"
                if args.validation_split is None else
                "cp_{epoch:02d}_loss-{loss:.2f}_valloss-{val_loss:.2f}.h5"),
                            save_weights_only=True,
                            monitor='loss'
                            if args.validation_split is None else 'val_loss',
                            mode='min')
        ])

    model.save_weights(os.path.join(args.output_dir, "model.h5"))
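
Every entry point above pulls runtime options from an args namespace whose definition is not part of these snippets. An argparse setup consistent with the attributes they access might look like the sketch below; the flag names follow those attribute accesses, while the defaults are assumptions:

import argparse

parser = argparse.ArgumentParser(description="Train an SSD model")
parser.add_argument("--config", required=True, help="path to the JSON config file")
parser.add_argument("--images-dir", dest="images_dir", required=True)
parser.add_argument("--labels-dir", dest="labels_dir", required=True)
parser.add_argument("--label-maps", dest="label_maps", required=True)
parser.add_argument("--training-split", dest="training_split", required=True)
parser.add_argument("--validation-split", dest="validation_split", default=None)
parser.add_argument("--checkpoint", default=None, help="optional weights file to resume from")
parser.add_argument("--output-dir", dest="output_dir", default="output")      # assumed default
parser.add_argument("--batch-size", dest="batch_size", type=int, default=32)  # assumed default
parser.add_argument("--learning-rate", dest="learning_rate", type=float, default=1e-3)  # assumed default
parser.add_argument("--epochs", type=int, default=100)                        # assumed default
parser.add_argument("--shuffle", action="store_true", help="shuffle samples in the data generator")
parser.add_argument("--augment", action="store_true", help="apply data augmentation during training")
args = parser.parse_args()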
Example #5
def ssd_vgg16(config, args, callbacks):
    """Train an SSD VGG16 model from the given config, CLI args, and caller-supplied Keras callbacks."""
    training_config = config["training"]
    with open(args.label_maps, "r") as label_map_file:
        label_maps = [i.strip("\n") for i in label_map_file.readlines()]

    training_samples = data_utils.get_samples_from_split(
        split_file=args.training_split,
        images_dir=args.images_dir,
        labels_dir=args.labels_dir)

    if args.validation_split is not None:
        validation_samples = data_utils.get_samples_from_split(
            split_file=args.validation_split,
            images_dir=args.images_dir,
            labels_dir=args.labels_dir)

    training_data_generator = SSD_DATA_GENERATOR(
        samples=training_samples,
        config=config,
        label_maps=label_maps,
        shuffle=args.shuffle,
        batch_size=args.batch_size,
        augment=args.augment,
        process_input_fn=preprocess_input)

    if args.validation_split is not None:
        print("-- validation split specified")
        validation_data_generator = SSD_DATA_GENERATOR(
            samples=validation_samples,
            config=config,
            label_maps=label_maps,
            shuffle=args.shuffle,
            batch_size=args.batch_size,
            augment=False,
            process_input_fn=preprocess_input)

    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])

    if training_config["optimizer"]["name"] == "adam":
        optimizer = Adam(learning_rate=args.learning_rate,
                         beta_1=training_config["optimizer"]["beta_1"],
                         beta_2=training_config["optimizer"]["beta_2"],
                         epsilon=training_config["optimizer"]["epsilon"],
                         decay=training_config["optimizer"]["decay"])
    elif training_config["optimizer"]["name"] == "sgd":
        optimizer = SGD(learning_rate=args.learning_rate,
                        momentum=training_config["optimizer"]["momentum"],
                        decay=training_config["optimizer"]["decay"],
                        nesterov=training_config["optimizer"]["nesterov"])
    else:
        optimizer = Adam(learning_rate=args.learning_rate,
                         beta_1=0.9,
                         beta_2=0.999,
                         epsilon=1e-08,
                         decay=0.0)

    model = SSD_VGG16(config=config, label_maps=label_maps, is_training=True)

    if args.show_network_structure:
        model.summary()

    model.compile(optimizer=optimizer, loss=loss.compute)

    if args.checkpoint is not None:
        assert os.path.exists(args.checkpoint), "checkpoint does not exist"
        model.load_weights(args.checkpoint, by_name=True)

    model.fit(
        x=training_data_generator,
        validation_data=validation_data_generator
        if args.validation_split is not None else None,
        batch_size=args.batch_size,
        validation_batch_size=args.batch_size,
        epochs=args.epochs,
        initial_epoch=args.initial_epoch,
        callbacks=callbacks,
    )

    model.save_weights(os.path.join(args.output_dir, "model.h5"))
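
Unlike ssd_mobilenetv1, ssd_vgg16 leaves the callbacks list to the caller. A minimal sketch that reuses the ModelCheckpoint pattern from the earlier example; the checkpoint filename template and monitored metric are assumptions along the same lines, and the tf.keras import path is assumed:

from tensorflow.keras.callbacks import ModelCheckpoint

callbacks = [
    ModelCheckpoint(
        filepath=os.path.join(args.output_dir, "cp_{epoch:02d}_loss-{loss:.2f}.h5"),
        save_weights_only=True,
        monitor="loss" if args.validation_split is None else "val_loss",
        mode="min")
]

ssd_vgg16(config, args, callbacks)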