Example #1: L1NormPruner
 def test_l1_norm_pruner(self):
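     # L1-norm filter pruning to 80% sparsity on every Conv2d layer; dependency_aware
     # mode traces the model with dummy_input so dependent channels are pruned together.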
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     pruner = L1NormPruner(model=model,
                           config_list=config_list,
                           mode='dependency_aware',
                           dummy_input=torch.rand(10, 1, 28, 28))
     pruned_model, masks = pruner.compress()
     pruner._unwrap_model()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.79 < sparsity_list[0]['total_sparsity'] < 0.81
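These snippets are methods of a pruner test class and rely on helpers defined elsewhere in that module: TorchModel, get_optimizer, trainer, criterion and evaluator, plus the pruner classes and compute_sparsity_mask2compact, which come from NNI's compression package (exact import paths differ between NNI releases). Below is a minimal sketch of what those helpers could look like; the MNIST-sized model, hyperparameters, and use of nni.trace are illustrative assumptions, not the original test fixtures.

import nni
import torch
import torch.nn.functional as F

class TorchModel(torch.nn.Module):
    # Small MNIST-sized CNN: gives the pruners Conv2d and BatchNorm2d layers to target.
    def __init__(self):
        super().__init__()
        self.conv1 = torch.nn.Conv2d(1, 16, 3)
        self.bn1 = torch.nn.BatchNorm2d(16)
        self.conv2 = torch.nn.Conv2d(16, 32, 3)
        self.bn2 = torch.nn.BatchNorm2d(32)
        self.fc = torch.nn.Linear(32 * 24 * 24, 10)

    def forward(self, x):
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.relu(self.bn2(self.conv2(x)))
        return self.fc(torch.flatten(x, 1))

def get_optimizer(model):
    # nni.trace records the constructor call so pruners that take a traced_optimizer
    # can re-create the optimizer internally.
    return nni.trace(torch.optim.SGD)(model.parameters(), lr=0.01)

criterion = F.cross_entropy

def trainer(model, optimizer, criterion):
    # Pruners call trainer(model, optimizer, criterion); one batch of random data
    # stands in for a real MNIST loader here.
    model.train()
    data, target = torch.rand(10, 1, 28, 28), torch.randint(0, 10, (10,))
    optimizer.zero_grad()
    criterion(model(data), target).backward()
    optimizer.step()

def evaluator(model):
    # Must return a single score (higher is better); here a dummy accuracy on random data.
    model.eval()
    with torch.no_grad():
        data, target = torch.rand(10, 1, 28, 28), torch.randint(0, 10, (10,))
        pred = model(data).argmax(dim=1)
    return (pred == target).float().mean().item()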
Example #2: LotteryTicketPruner
 def test_lottery_ticket_pruner(self):
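     # Lottery-ticket style iterative pruning: 3 rounds of magnitude ('level') pruning
     # toward 80% sparsity; get_best_result() returns the best round's model and masks.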
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     pruner = LotteryTicketPruner(model,
                                  config_list,
                                  'level',
                                  3,
                                  log_dir='../../logs')
     pruner.compress()
     _, pruned_model, masks, _, _ = pruner.get_best_result()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82
Example #3: SimulatedAnnealingPruner
 def test_simulated_annealing_pruner(self):
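     # Simulated annealing searches for a per-layer sparsity allocation that reaches 80%
     # overall, scoring each candidate with evaluator; start_temperature controls exploration.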
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     pruner = SimulatedAnnealingPruner(model,
                                       config_list,
                                       evaluator,
                                       start_temperature=40,
                                       log_dir='../../logs')
     pruner.compress()
     _, pruned_model, masks, _, _ = pruner.get_best_result()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82
Example #4: ADMMPruner
 def test_admm_pruner(self):
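     # ADMM pruning alternates training with a projection onto the 80% sparsity constraint
     # for 2 ADMM iterations (1 training epoch each); 'rho' is the augmented-Lagrangian penalty.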
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8, 'rho': 1e-3}]
     pruner = ADMMPruner(model=model,
                         config_list=config_list,
                         trainer=trainer,
                         optimizer=get_optimizer(model),
                         criterion=criterion,
                         iterations=2,
                         training_epochs=1)
     pruned_model, masks = pruner.compress()
     pruner._unwrap_model()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.79 < sparsity_list[0]['total_sparsity'] < 0.81
Example #5: TaylorFOWeightPruner
 def test_taylor_fo_pruner(self):
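     # Filter importance is estimated with a first-order Taylor expansion of the loss,
     # collected over 1 training batch; dependency_aware mode traces the model via dummy_input.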
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     pruner = TaylorFOWeightPruner(model=model,
                                   config_list=config_list,
                                   trainer=trainer,
                                   optimizer=get_optimizer(model),
                                   criterion=criterion,
                                   training_batches=1,
                                   mode='dependency_aware',
                                   dummy_input=torch.rand(10, 1, 28, 28))
     pruned_model, masks = pruner.compress()
     pruner._unwrap_model()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.79 < sparsity_list[0]['total_sparsity'] < 0.81
Example #6: SlimPruner
 def test_slim_pruner(self):
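     # Network slimming: channels are ranked by BatchNorm2d scale factors trained for one
     # epoch with an L1 penalty (scale=0.001); mode='global' allocates the 80% total_sparsity
     # across all BatchNorm2d layers rather than pruning each layer to 80%.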
     model = TorchModel()
     config_list = [{'op_types': ['BatchNorm2d'], 'total_sparsity': 0.8}]
     pruner = SlimPruner(model=model,
                         config_list=config_list,
                         trainer=trainer,
                         optimizer=get_optimizer(model),
                         criterion=criterion,
                         training_epochs=1,
                         scale=0.001,
                         mode='global')
     pruned_model, masks = pruner.compress()
     pruner._unwrap_model()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.79 < sparsity_list[0]['total_sparsity'] < 0.81
Example #7: MovementPruner
 def test_movement_pruner(self):
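     # Movement pruning scores weights by how they move during fine-tuning; sparsity is
     # ramped up between warm_up_step and cool_down_beginning_step (in optimizer steps).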
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     pruner = MovementPruner(model=model,
                             config_list=config_list,
                             trainer=trainer,
                             traced_optimizer=get_optimizer(model),
                             criterion=criterion,
                             training_epochs=5,
                             warm_up_step=0,
                             cool_down_beginning_step=4)
     pruned_model, masks = pruner.compress()
     pruner._unwrap_model()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82
Example #8: ActivationMeanRankPruner
 def test_activation_mean_rank_pruner(self):
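     # Filters are ranked by the mean of their relu activations collected over 5 training
     # batches; dependency_aware mode keeps dependent layers' pruned channels aligned.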
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     pruner = ActivationMeanRankPruner(
         model=model,
         config_list=config_list,
         trainer=trainer,
         traced_optimizer=get_optimizer(model),
         criterion=criterion,
         training_batches=5,
         activation='relu',
         mode='dependency_aware',
         dummy_input=torch.rand(10, 1, 28, 28))
     pruned_model, masks = pruner.compress()
     pruner._unwrap_model()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks,
                                                   config_list)
     assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82
Example #9: AutoCompressPruner
 def test_auto_compress_pruner(self):
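     # AutoCompress: each of the 10 iterations runs a simulated-annealing sparsity search
     # followed by ADMM pruning configured with admm_params.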
     model = TorchModel()
     config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
     admm_params = {
         'trainer': trainer,
         'optimizer': get_optimizer(model),
         'criterion': criterion,
         'iterations': 10,
         'training_epochs': 1
     }
     sa_params = {
         'evaluator': evaluator,
         'start_temperature': 40
     }
     pruner = AutoCompressPruner(model,
                                 config_list,
                                 10,
                                 admm_params,
                                 sa_params=sa_params,
                                 log_dir='../../../logs')
     pruner.compress()
     _, pruned_model, masks, _, _ = pruner.get_best_result()
     sparsity_list = compute_sparsity_mask2compact(pruned_model, masks, config_list)
     print(sparsity_list)
     assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82