def prune_finetune_test(iteration, model_cpy, pruning_step, test_fn, train_fn,
                        app_args, param_name, effective_train_size):
    pylogger = PythonLogger(msglogger)
    zeros_mask_dict = distiller.create_model_masks_dict(model_cpy)
    param = model_cpy.state_dict()[param_name]
    if 0 == prune_tensor(param, param_name, pruning_step, zeros_mask_dict):
        return (-1, -1, -1, zeros_mask_dict)  # Did not prune anything

    if train_fn is not None:
        # Fine-tune
        optimizer = torch.optim.SGD(model_cpy.parameters(),
                                    lr=app_args.lr,
                                    momentum=app_args.momentum,
                                    weight_decay=app_args.weight_decay)
        app_args.effective_train_size = effective_train_size
        train_fn(model=model_cpy,
                 compression_scheduler=create_scheduler(
                     model_cpy, zeros_mask_dict),
                 optimizer=optimizer,
                 epoch=iteration,
                 loggers=[pylogger])

    # Physically remove filters
    dataset = app_args.dataset
    arch = app_args.arch
    distiller.remove_filters(model_cpy,
                             zeros_mask_dict,
                             arch,
                             dataset,
                             optimizer=None)

    # Test and record the performance of the pruned model
    prec1, prec5, loss = test_fn(model=model_cpy, loggers=None)
    return (prec1, prec5, loss, zeros_mask_dict)
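# `create_scheduler` and `prune_tensor` are helpers defined elsewhere in the
# module this example was taken from. Below is a plausible sketch of
# `create_scheduler`, assuming only distiller's public CompressionScheduler API;
# the original helper may differ.
import distiller

def create_scheduler(model, zeros_mask_dict):
    # Wrap the freshly computed masks in a CompressionScheduler so that the
    # fine-tuning loop keeps re-applying them between weight updates.
    scheduler = distiller.CompressionScheduler(model)
    masks = {param_name: masker.mask
             for param_name, masker in zeros_mask_dict.items()}
    scheduler.load_state_dict(state={'masks_dict': masks})
    return scheduler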
Example #2
def test_load_gpu_model_on_cpu_with_thinning():
    # Issue #148
    # 1. create a GPU model and remove 50% of the filters in one of the layers (thinning)
    # 2. save the thinned model in a checkpoint file
    # 3. load the checkpoint and place it on the CPU
    CPU_DEVICE_ID = -1
    gpu_model = create_model(False, 'cifar10', 'resnet20_cifar')
    conv_pname = "module.layer1.0.conv1.weight"
    conv_p = distiller.model_find_param(gpu_model, conv_pname)
    pruner = distiller.pruning.L1RankedStructureParameterPruner("test_pruner", group_type="Filters",
                                                                desired_sparsity=0.5, weights=conv_pname)
    zeros_mask_dict = distiller.create_model_masks_dict(gpu_model)
    pruner.set_param_mask(conv_p, conv_pname, zeros_mask_dict, meta=None)

    # Use the mask to prune
    zeros_mask_dict[conv_pname].apply_mask(conv_p)
    distiller.remove_filters(gpu_model, zeros_mask_dict, 'resnet20_cifar', 'cifar10', optimizer=None)
    assert hasattr(gpu_model, 'thinning_recipes')
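    # remove_filters() records the structural changes it made as 'thinning_recipes'
    # on the model; save_checkpoint() stores them so that load_checkpoint() can
    # replay them and reshape the CPU model before loading the thinned weights.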
    scheduler = distiller.CompressionScheduler(gpu_model)
    save_checkpoint(epoch=0, arch='resnet20_cifar', model=gpu_model, scheduler=scheduler, optimizer=None)

    cpu_model = create_model(False, 'cifar10', 'resnet20_cifar', device_ids=CPU_DEVICE_ID)
    load_checkpoint(cpu_model, "checkpoint.pth.tar")
    assert distiller.model_device(cpu_model) == 'cpu'
Example #3
def reset(self, init_only=False):
    """Reset the environment.
    This is invoked by the Agent.
    """
    msglogger.info("Resetting the environment (init_only={})".format(init_only))
    self.current_layer_id = -1
    self.prev_action = 0
    self.model = copy.deepcopy(self.orig_model)
    self.zeros_mask_dict = distiller.create_model_masks_dict(self.model)
    self._remaining_macs = self.dense_model_macs
    self._removed_macs = 0
    if init_only:
        return
    obs, _, _, _ = self.step(0)
    return obs
Example #4
def reset(self, model):
    self.model = model
    self.zeros_mask_dict = distiller.create_model_masks_dict(self.model)
Example #5
def reset(self, model):
    self.model = model
    self.zeros_mask_dict = distiller.create_model_masks_dict(self.model)
    self.model_metadata = copy.deepcopy(self.cached_model_metadata)
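# Common to all of the examples above: distiller.create_model_masks_dict(model)
# returns a {parameter_name: ParameterMasker} dict whose masks start out as None,
# and a pruner (or a loaded CompressionScheduler state) fills them in later.
# A minimal standalone sketch, assuming distiller and its bundled
# cifar10/resnet20_cifar model are importable:
import distiller
from distiller.models import create_model

model = create_model(False, 'cifar10', 'resnet20_cifar')
zeros_mask_dict = distiller.create_model_masks_dict(model)
for pname, masker in list(zeros_mask_dict.items())[:3]:
    print(pname, masker.mask)  # mask is None until a pruner sets it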