import os

from robustness import datasets, model_utils

# Repo-local modules: `fine_tunify` rebuilds the classification head and
# `transfer_datasets` registers the target transfer datasets. `pytorch_models`
# is assumed to be a dict mapping torchvision architecture names to their
# constructors, defined elsewhere in the repo.
import fine_tunify
import transfer_datasets


def resume_finetuning_from_checkpoint(args, ds, finetuned_model_path):
    """Given arguments, a dataset object, and a path to a finetuned model,
    returns the model with loaded weights and the checkpoint needed to
    resume training.
    """
    print("[Resuming finetuning from a checkpoint...]")
    # Transfer setting: the source model was trained on ImageNet, so rebuild
    # it against an ImageNet dataset object before swapping the head.
    if args.dataset in transfer_datasets.DS_TO_FUNC and not args.cifar10_cifar10:
        model, _ = model_utils.make_and_restore_model(
            arch=pytorch_models[args.arch](args.pytorch_pretrained)
            if args.arch in pytorch_models.keys()
            else args.arch,
            dataset=datasets.ImageNet(""),
            add_custom_forward=args.arch in pytorch_models.keys(),
        )
        # Unwrap the robustness AttackerModel wrapper(s) to reach the bare
        # architecture before replacing its head.
        while hasattr(model, "model"):
            model = model.model
        model = fine_tunify.ft(args.arch, model, ds.num_classes, args.additional_hidden)
        model, checkpoint = model_utils.make_and_restore_model(
            arch=model,
            dataset=ds,
            resume_path=finetuned_model_path,
            add_custom_forward=args.additional_hidden > 0
            or args.arch in pytorch_models.keys(),
        )
    else:
        model, checkpoint = model_utils.make_and_restore_model(
            arch=args.arch, dataset=ds, resume_path=finetuned_model_path
        )
    return model, checkpoint


def get_model(args, ds):
    """Given arguments and a dataset object, returns an ImageNet model (with appropriate last layer changes to 
    fit the target dataset) and a checkpoint.The checkpoint is set to None if noe resuming training.
    """
    # Optionally resume finetuning from a checkpoint (ImageNet-to-ImageNet
    # transfer only).
    finetuned_model_path = os.path.join(args.out_dir, "checkpoint.pt.latest")
    if args.resume and os.path.isfile(finetuned_model_path):
        model, checkpoint = resume_finetuning_from_checkpoint(
            args, ds, finetuned_model_path
        )
    else:
        # Transfer setting: load the source (ImageNet) model, either from
        # args.model_path or from torchvision's pretrained weights.
        if args.dataset in transfer_datasets.DS_TO_FUNC and not args.cifar10_cifar10:
            model, _ = model_utils.make_and_restore_model(
                arch=pytorch_models[args.arch](args.pytorch_pretrained)
                if args.arch in pytorch_models.keys()
                else args.arch,
                dataset=datasets.ImageNet(""),
                resume_path=args.model_path,
                pytorch_pretrained=args.pytorch_pretrained,
                add_custom_forward=args.arch in pytorch_models.keys(),
            )
            checkpoint = None
        else:
            model, _ = model_utils.make_and_restore_model(
                arch=args.arch,
                dataset=ds,
                resume_path=args.model_path,
                pytorch_pretrained=args.pytorch_pretrained,
            )
            checkpoint = None

        # For all other cases, replace the last layer before finetuning,
        # unless disabled via the args.no_replace_last_layer flag.
        if not args.no_replace_last_layer and not args.eval_only:
            print(
                f"[Replacing the last layer with {args.additional_hidden} "
                f"hidden layers and 1 classification layer that fits the {args.dataset} dataset.]"
            )
            # Unwrap wrapper modules again before swapping in the new head.
            while hasattr(model, "model"):
                model = model.model
            model = fine_tunify.ft(
                args.arch, model, ds.num_classes, args.additional_hidden
            )
            model, checkpoint = model_utils.make_and_restore_model(
                arch=model,
                dataset=ds,
                add_custom_forward=args.additional_hidden > 0
                or args.arch in pytorch_models.keys(),
            )
        else:
            print("[NOT replacing the last layer]")
    return model, checkpoint