Example #1
import os
from functools import partial

import distiller
import distiller.apputils.image_classifier as classifier

# msglogger is assumed to be the module-level logger configured by the
# surrounding sample application.


def sensitivity_analysis(model, criterion, data_loader, loggers, args,
                         sparsities):
    # This sample application can be invoked to execute Sensitivity Analysis on your
    # model.  The output is saved to CSV and PNG.
    msglogger.info("Running sensitivity tests")
    if not isinstance(loggers, list):
        loggers = [loggers]
    test_fnc = partial(
        classifier.test,
        test_loader=data_loader,
        criterion=criterion,
        loggers=loggers,
        args=args,
        activations_collectors=classifier.create_activation_stats_collectors(
            model))
    which_params = [param_name for param_name, _ in model.named_parameters()]
    sensitivity = distiller.perform_sensitivity_analysis(
        model,
        net_params=which_params,
        sparsities=sparsities,
        test_func=test_fnc,
        group=args.sensitivity)
    distiller.sensitivities_to_png(
        sensitivity, os.path.join(msglogger.logdir, 'sensitivity.png'))
    distiller.sensitivities_to_csv(
        sensitivity, os.path.join(msglogger.logdir, 'sensitivity.csv'))
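How the surrounding application might drive this helper, as a minimal hypothetical invocation sketch (test_loader, pylogger and args stand in for objects created elsewhere in the sample application; the 0-90% sweep is illustrative):

import numpy as np

sparsities = np.arange(0.0, 0.95, 0.05)  # sparsity levels 0%, 5%, ..., 90%
sensitivity_analysis(model, criterion, test_loader, pylogger, args, sparsities)
# msglogger.logdir now contains sensitivity.png and sensitivity.csv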
Example #2
import os

import numpy as np

import distiller
import distiller.apputils as apputils
import distiller.apputils.image_classifier as classifier

# msglogger, greedy and sensitivity_analysis (Example #1) are assumed to be
# defined at module level in the surrounding sample application.


def handle_subapps(model, criterion, optimizer, compression_scheduler, pylogger, args):
    def load_test_data(args):
        test_loader = classifier.load_data(args, load_train=False, load_val=False, load_test=True)
        return test_loader

    do_exit = False
    if args.greedy:
        greedy(model, criterion, optimizer, pylogger, args)
        do_exit = True
    elif args.summary:
        # This sample application can be invoked to produce various summary reports
        for summary in args.summary:
            distiller.model_summary(model, summary, args.dataset)
        do_exit = True
    elif args.export_onnx is not None:
        distiller.export_img_classifier_to_onnx(model,
                                                os.path.join(msglogger.logdir, args.export_onnx),
                                                args.dataset, add_softmax=True, verbose=False)
        do_exit = True
    elif args.qe_calibration and not (args.evaluate and args.quantize_eval):
        classifier.acts_quant_stats_collection(model, criterion, pylogger, args, save_to_file=True)
        do_exit = True
    elif args.activation_histograms:
        classifier.acts_histogram_collection(model, criterion, pylogger, args)
        do_exit = True
    elif args.sensitivity is not None:
        test_loader = load_test_data(args)
        sensitivities = np.arange(*args.sensitivity_range)
        sensitivity_analysis(model, criterion, test_loader, pylogger, args, sensitivities)
        do_exit = True
    elif args.evaluate:
        if args.quantize_eval and args.qe_lapq:
            from ptq_lapq import image_classifier_ptq_lapq
            image_classifier_ptq_lapq(model, criterion, pylogger, args)
        else:
            test_loader = load_test_data(args)
            classifier.evaluate_model(test_loader, model, criterion, pylogger,
                classifier.create_activation_stats_collectors(model, *args.activation_stats),
                args, scheduler=compression_scheduler)
        do_exit = True
    elif args.thinnify:
        assert args.resumed_checkpoint_path is not None, \
            "You must use --resume-from to provide a checkpoint file to thinnify"
        distiller.contract_model(model, compression_scheduler.zeros_mask_dict, args.arch, args.dataset, optimizer=None)
        apputils.save_checkpoint(0, args.arch, model, optimizer=None, scheduler=compression_scheduler,
                                 name="{}_thinned".format(args.resumed_checkpoint_path.replace(".pth.tar", "")),
                                 dir=msglogger.logdir)
        msglogger.info("Note: if your model collapsed to random inference, you may want to fine-tune")
        do_exit = True
    return do_exit
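handle_subapps dispatches on mutually exclusive command-line flags: each branch runs a one-shot "sub-application" (summary report, ONNX export, quantization calibration, activation histograms, sensitivity analysis, evaluation, thinning) and sets do_exit so the caller can skip the regular training loop. A minimal sketch of the calling side (main() is hypothetical, standing in for the sample application's entry point):

def main(model, criterion, optimizer, compression_scheduler, pylogger, args):
    if handle_subapps(model, criterion, optimizer, compression_scheduler,
                      pylogger, args):
        return  # a sub-application ran; nothing left to do
    # ...otherwise fall through to the regular training pipeline.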
Example #3

import torch.nn as nn  # in addition to the imports listed in Example #2

# custom_datasets is assumed to be a module-level collection of dataset names
# defined by the surrounding application.


def handle_subapps(model, criterion, optimizer, compression_scheduler,
                   pylogger, args):
    if args.transfer or args.dataset in custom_datasets:
        if args.arch in ('resnet34', 'resnet20_cifar'):
            model.module.fc = nn.Linear(model.module.fc.in_features,
                                        args.num_classes)

            if args.resumed_checkpoint_path:
                model, _, _, _ = apputils.load_checkpoint(
                    model,
                    args.resumed_checkpoint_path,
                    model_device=args.device)
            model = model.module
            model = nn.DataParallel(model, device_ids=args.gpus)

            print(model.module.fc)
        elif args.arch == 'vgg16':
            model.classifier[6] = nn.Linear(model.classifier[6].in_features,
                                            args.num_classes)
            if args.resumed_checkpoint_path:
                model, _, _, _ = apputils.load_checkpoint(
                    model,
                    args.resumed_checkpoint_path,
                    model_device=args.device)
            model.to(0)
            print(model.classifier[6])

    def load_test_data(args):
        test_loader = classifier.load_data(args,
                                           load_train=False,
                                           load_val=False,
                                           load_test=True)
        return test_loader

    do_exit = False
    if args.greedy:
        greedy(model, criterion, optimizer, pylogger, args)
        do_exit = True
    elif args.summary:
        # This sample application can be invoked to produce various summary reports
        for summary in args.summary:
            distiller.model_summary(model, summary, args.dataset)
        do_exit = True
    elif args.export_onnx is not None:
        distiller.export_img_classifier_to_onnx(model,
                                                os.path.join(
                                                    msglogger.logdir,
                                                    args.export_onnx),
                                                args.dataset,
                                                add_softmax=True,
                                                verbose=False)
        do_exit = True
    elif args.qe_calibration:
        classifier.acts_quant_stats_collection(model, criterion, pylogger,
                                               args)
        do_exit = True
    elif args.activation_histograms:
        classifier.acts_histogram_collection(model, criterion, pylogger, args)
        do_exit = True
    elif args.sensitivity is not None:
        test_loader = load_test_data(args)
        sensitivities = np.arange(*args.sensitivity_range)
        sensitivity_analysis(model, criterion, test_loader, pylogger, args,
                             sensitivities)
        do_exit = True
    elif args.evaluate:
        test_loader = load_test_data(args)
        activations_collectors = classifier.create_activation_stats_collectors(
            model, *args.activation_stats)
        classifier.evaluate_model(model, criterion, test_loader, pylogger,
                                  activations_collectors, args,
                                  compression_scheduler)
        do_exit = True
    elif args.thinnify:
        assert args.resumed_checkpoint_path is not None, \
            "You must use --resume-from to provide a checkpoint file to thinnify"
        distiller.remove_filters(model,
                                 compression_scheduler.zeros_mask_dict,
                                 args.arch,
                                 args.dataset,
                                 optimizer=None)
        apputils.save_checkpoint(0,
                                 args.arch,
                                 model,
                                 optimizer=None,
                                 scheduler=compression_scheduler,
                                 name="{}_thinned".format(
                                     args.resumed_checkpoint_path.replace(
                                         ".pth.tar", "")),
                                 dir=msglogger.logdir)
        msglogger.info(
            "Note: if your model collapsed to random inference, you may want to fine-tune"
        )
        do_exit = True
    return do_exit
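The transfer-learning branch at the top of this variant replaces the network's classification head so its output size matches the new dataset before reloading the checkpoint. The same pattern on plain torchvision models, as a self-contained sketch (num_classes is illustrative):

import torch.nn as nn
import torchvision.models as models

num_classes = 10  # illustrative target-dataset class count

resnet = models.resnet34(pretrained=True)
# ResNet exposes its head as the fc attribute; swap it for a Linear layer
# sized to the new task while keeping all other pretrained weights.
resnet.fc = nn.Linear(resnet.fc.in_features, num_classes)

vgg = models.vgg16(pretrained=True)
# VGG's head is an nn.Sequential; index 6 is the final Linear layer.
vgg.classifier[6] = nn.Linear(vgg.classifier[6].in_features, num_classes)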