Example #1
import warnings

from keras_retinanet import models


def read_model(model_path, config):
    """Read a keras-retinanet model saved with keras.Model.save()."""
    # Suppress the UserWarning; the model does not need to be compiled for prediction
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        model = models.load_model(model_path, backbone_name='resnet50')

    return model
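A minimal usage sketch for the helper above, assuming keras-retinanet is installed; the snapshot path and config dict are hypothetical placeholders, and the config is not consulted when loading.
# Hypothetical values for illustration only
snapshot_path = "snapshots/resnet50_retinanet.h5"  # assumed location of a saved model
config = {"backbone": "resnet50"}                  # placeholder config dict

model = read_model(snapshot_path, config)
print(model.name)  # inspect the loaded keras model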
Example #2
    def __init__(self, weights=None, saved_model=None):
        self.weights = weights
        self.saved_model = saved_model

        # Read the config file - if one exists in the local directory use it,
        # otherwise fall back to the copy installed with the package.
        if os.path.exists("deepforest_config.yml"):
            config_path = "deepforest_config.yml"
        else:
            try:
                config_path = get_data("deepforest_config.yml")
            except Exception as e:
                raise ValueError(
                    "No deepforest_config.yml found either in local "
                    "directory or in installed package location. {}".format(e))

        print("Reading config file: {}".format(config_path))
        self.config = utilities.read_config(config_path)

        # Create a label dict, defaults to "Tree"
        self.read_classes()

        # Release version id, used to flag whether a prebuilt release model is in use
        self.__release_version__ = None

        # Load saved model if needed
        if self.saved_model:
            print("Loading saved model")
            # Suppress the UserWarning raised on load; it is not relevant here
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=UserWarning)
                self.model = models.load_model(saved_model)
                self.prediction_model = convert_model(self.model)

        elif self.weights:
            print("Creating model from weights")
            backbone = models.backbone(self.config["backbone"])
            self.model, self.training_model, self.prediction_model = create_models(
                backbone.retinanet, num_classes=1, weights=self.weights)
        else:
            print(
                "A blank deepforest object created. "
                "To perform prediction, either train or load an existing model."
            )
            self.model = None
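A short sketch of how this constructor is typically used, assuming the deepforest package layout these snippets come from; the weights path is a hypothetical placeholder.
from deepforest import deepforest

# Blank object: the config is read, but no model is loaded yet
test_model = deepforest.deepforest()

# Or restore from previously trained weights (hypothetical path):
# trained_model = deepforest.deepforest(weights="snapshots/final_weights.h5")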
Example #3
def main(forest_object,
         args=None,
         input_type="fit_generator",
         list_of_tfrecords=None,
         comet_experiment=None):
    """
    Main Training Loop
    Args:
        forest_object: a deepforest class object
        args: Keras retinanet argparse
        list_of_tfrecords: list of tfrecords to parse
        input_type: "fit_generator" or "tfrecord" input type
    """
    # parse arguments
    if args is None:
        args = sys.argv[1:]
    args = parse_args(args)

    # create object that stores backbone information
    backbone = models.backbone(args.backbone)

    # make sure keras is the minimum required version
    check_keras_version()

    # optionally choose specific GPU
    if args.gpu:
        setup_gpu(args.gpu)

    # optionally load config parameters
    if args.config:
        args.config = read_config_file(args.config)

    # data input
    if input_type == "fit_generator":
        # create the generators
        train_generator, validation_generator = create_generators(
            args, backbone.preprocess_image)

        # placeholder target tensor for creating models
        targets = None

    elif input_type == "tfrecord":
        # Create tensorflow iterators
        iterator = tfrecords.create_dataset(list_of_tfrecords, args.batch_size)
        next_element = iterator.get_next()

        # Split into inputs and targets
        inputs = next_element[0]
        targets = [next_element[1], next_element[2]]

        validation_generator = None

    else:
        raise ValueError(
            "{} input type is invalid. Only 'tfrecord' or 'for_generator' "
            "input types are accepted for model training".format(input_type))

    # create the model
    if args.snapshot is not None:
        print('Loading model, this may take a second...')
        model = models.load_model(args.snapshot, backbone_name=args.backbone)
        training_model = model
        anchor_params = None
        if args.config and 'anchor_parameters' in args.config:
            anchor_params = parse_anchor_parameters(args.config)
        prediction_model = retinanet_bbox(model=model,
                                          anchor_params=anchor_params)
    else:
        weights = args.weights
        # default to imagenet if nothing else is specified
        if weights is None and args.imagenet_weights:
            weights = backbone.download_imagenet()

        print('Creating model, this may take a second...')
        if input_type == "fit_generator":
            num_of_classes = train_generator.num_classes()
        else:
            # Use the number of classes from the deepforest label dict
            num_of_classes = len(forest_object.labels.keys())

        model, training_model, prediction_model = create_models(
            backbone_retinanet=backbone.retinanet,
            num_classes=num_of_classes,
            weights=weights,
            multi_gpu=args.multi_gpu,
            freeze_backbone=args.freeze_backbone,
            lr=args.lr,
            config=args.config,
            targets=targets,
            freeze_layers=args.freeze_layers)

    # Print the model summary (model.summary() writes to stdout itself)
    model.summary()

    # this lets the generator compute backbone layer shapes using the actual backbone model
    if 'vgg' in args.backbone or 'densenet' in args.backbone:
        train_generator.compute_shapes = make_shapes_callback(model)
        if validation_generator:
            validation_generator.compute_shapes = train_generator.compute_shapes

    # create the callbacks
    callbacks = create_callbacks(model, training_model, prediction_model,
                                 validation_generator, args, comet_experiment)

    if not args.compute_val_loss:
        validation_generator = None

    # start training
    if input_type == "fit_generator":
        history = training_model.fit_generator(
            generator=train_generator,
            steps_per_epoch=args.steps,
            epochs=args.epochs,
            verbose=1,
            callbacks=callbacks,
            workers=args.workers,
            use_multiprocessing=args.multiprocessing,
            max_queue_size=args.max_queue_size,
            validation_data=validation_generator)
    elif input_type == "tfrecord":

        # Fit model
        history = training_model.fit(x=inputs,
                                     steps_per_epoch=args.steps,
                                     epochs=args.epochs,
                                     callbacks=callbacks)
    else:
        raise ValueError(
            "{} input type is invalid. Only 'tfrecord' or 'for_generator' "
            "input types are accepted for model training".format(input_type))

    # Assign history to deepforest model class
    forest_object.history = history

    # return trained model
    return model, prediction_model, training_model
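For context, a hedged sketch of driving this training loop directly; the argument list follows keras-retinanet's csv-generator argparse convention, and the file names and flag values are hypothetical placeholders.
from deepforest import deepforest

forest = deepforest.deepforest()

# Hypothetical keras-retinanet style arguments; real runs are usually assembled
# from the deepforest config rather than written by hand
retinanet_args = [
    "--backbone", "resnet50",
    "--epochs", "1",
    "--steps", "10",
    "csv", "example_annotations.csv", "example_classes.csv",
]

model, prediction_model, training_model = main(
    forest, args=retinanet_args, input_type="fit_generator")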