def log_weights_sparsity(self, model, epoch):
    """Log the sparsity of the model's weight tensors to TensorBoard."""
    params_size = 0
    sparse_params_size = 0
    for name, param in model.state_dict().items():
        # Only track weight tensors of fully-connected (2D) and
        # convolutional (4D) layers.
        if param.dim() in [2, 4]:
            _density = density(param)
            params_size += param.numel()
            sparse_params_size += param.numel() * _density
            self.tblogger.scalar_summary('sparsity/weights/' + name,
                                         sparsity(param) * 100, epoch)
            self.tblogger.scalar_summary('sparsity-2D/weights/' + name,
                                         sparsity_2D(param) * 100, epoch)
    # Overall sparsity: percentage of zero elements across all tracked tensors.
    self.tblogger.scalar_summary('sparsity/weights/total',
                                 100 * (1 - sparse_params_size / params_size),
                                 epoch)
    self.tblogger.sync_to_file()
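
# The sparsity()/density()/sparsity_2D() helpers used above are assumed and
# not shown in this excerpt. Below is a minimal sketch of what they might
# compute (element-level and kernel-level zero fractions); the project's
# actual definitions may differ, in particular sparsity_2D's exact notion
# of a "2D structure".
import torch

def sparsity(tensor):
    """Fraction of elements that are exactly zero."""
    num_elements = tensor.numel()
    if num_elements == 0:
        return 0.0
    num_nonzero = (tensor != 0).sum().item()
    return 1.0 - num_nonzero / num_elements

def density(tensor):
    """Fraction of elements that are non-zero (1 - sparsity)."""
    return 1.0 - sparsity(tensor)

def sparsity_2D(tensor):
    """Fraction of 2D kernels that are entirely zero (an assumed reading of
    the structured-sparsity metric; returns 0.0 for non-4D tensors)."""
    if tensor.dim() != 4:
        return 0.0
    # Flatten each (out_channel, in_channel) kernel into one row.
    kernels = tensor.reshape(tensor.size(0) * tensor.size(1), -1)
    zero_kernels = (kernels.abs().sum(dim=1) == 0).sum().item()
    return zero_kernels / kernels.size(0)
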
def __activation_sparsity_cb(module, input, output):
    """Record the activation sparsity of 'module'.

    This is a callback from the forward() of 'module'.
    """
    module.sparsity.add(sparsity(output.data))
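
# Hypothetical usage sketch: the callback has the standard PyTorch forward-hook
# signature (module, input, output), so it can be attached with
# register_forward_hook(). AverageMeter below is a minimal stand-in for
# whatever meter object the project attaches as 'module.sparsity' (anything
# exposing an add() method, e.g. torchnet.meter.AverageValueMeter).
import torch
import torch.nn as nn

class AverageMeter:
    """Running average of the values passed to add()."""
    def __init__(self):
        self.sum, self.count = 0.0, 0

    def add(self, value):
        self.sum += value
        self.count += 1

    def mean(self):
        return self.sum / self.count if self.count else 0.0

model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 4))
for module in model.modules():
    if isinstance(module, nn.ReLU):
        module.sparsity = AverageMeter()
        module.register_forward_hook(__activation_sparsity_cb)

model(torch.randn(2, 8))        # hooks fire; ReLU output sparsity is recorded
print(model[1].sparsity.mean())  # mean sparsity of the ReLU's activations
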