Пример #1
0
    def test_oracle(self):
        """Check oracle gradient and objective against references over a
        sweep of shrinking temperatures."""
        # The first temperature (1e-1) gives slightly different values; the
        # remaining four references are identical — the smoothing has
        # effectively converged below temp=1e-2.
        first_ref = {
            'dw': torch.tensor([[0.7357], [1.4836], [-0.3669]]),
            'obj': torch.tensor(1.3400),
        }
        converged = {
            'dw': torch.tensor([[0.7414], [1.4836], [-0.3669]]),
            'obj': torch.tensor(1.3397),
        }
        cache_refs = [first_ref] + [
            {'dw': converged['dw'].clone(), 'obj': converged['obj'].clone()}
            for _ in range(4)
        ]
        temps = [1e-1, 1e-2, 1e-3, 1e-4, 1e-5]

        for temp, cache_ref in zip(temps, cache_refs):
            # Rebuild the objective so the new temperature takes effect.
            self.hparams.temp = temp
            self.obj = SmoothedLasso_Gradient(self.hparams)
            cache_test = self.obj.oracle(self.w, self.x, self.y)
            assert_all_close_dict(
                cache_ref, cache_test,
                "oracle_info with parameter temp={}".format(temp))
            # Clear accumulated gradients between oracle calls; grad is
            # None until the first backward pass, hence the guard.
            if self.w.grad is not None:
                self.w.grad.zero_()
Пример #2
0
class TestObj_SmoothedLasso_Gradient(unittest.TestCase):
    """Tests for the smoothed-lasso objective (gradient oracle variant)."""

    def setUp(self):
        # Shared lasso fixture provides self.w, self.x, self.y, self.hparams.
        _init_lasso(self)
        self.obj = SmoothedLasso_Gradient(self.hparams)

    def test_error(self):
        """task_error must reproduce the pre-computed reference value."""
        reference = torch.tensor(1.3251)
        computed = self.obj.task_error(self.w, self.x, self.y)
        assert_all_close(computed, reference, "task_error returned value")

    def test_oracle(self):
        """Oracle gradient/objective must match references for each
        temperature (insertion order of the mapping fixes the sweep order)."""
        references = {
            0.1: {'dw': torch.tensor([[0.7357], [1.4836], [-0.3669]]),
                  'obj': torch.tensor(1.3400)},
            1: {'dw': torch.tensor([[0.7319], [1.4774], [-0.3645]]),
                'obj': torch.tensor(1.3511)},
            10: {'dw': torch.tensor([[0.7315], [1.4740], [-0.3579]]),
                 'obj': torch.tensor(1.5336)},
        }

        for temp, cache_ref in references.items():
            # Rebuild the objective so the new temperature takes effect.
            self.hparams.temp = temp
            self.obj = SmoothedLasso_Gradient(self.hparams)
            cache_test = self.obj.oracle(self.w, self.x, self.y)
            assert_all_close_dict(
                cache_ref, cache_test,
                "oracle_info with parameter temp={}".format(temp))
            # grad is None until the first backward pass, hence the guard.
            if self.w.grad is not None:
                self.w.grad.zero_()
Пример #3
0
    def test_oracle(self):
        """Check oracle gradient and objective against references for
        temperatures 0.1, 1 and 10."""
        cache_refs = [{
            'dw': torch.tensor([[0.7357], [1.4836], [-0.3669]]),
            'obj': torch.tensor(1.3400)
        }, {
            'dw': torch.tensor([[0.7319], [1.4774], [-0.3645]]),
            'obj': torch.tensor(1.3511)
        }, {
            'dw': torch.tensor([[0.7315], [1.4740], [-0.3579]]),
            'obj': torch.tensor(1.5336)
        }]
        temps = [0.1, 1, 10]

        for temp, cache_ref in zip(temps, cache_refs):
            # Rebuild the objective so the new temperature takes effect.
            self.hparams.temp = temp
            self.obj = SmoothedLasso_Gradient(self.hparams)
            cache_test = self.obj.oracle(self.w, self.x, self.y)
            assert_all_close_dict(
                cache_ref, cache_test,
                "oracle_info with parameter temp={}".format(temp))
            # BUGFIX: w.grad is None until the first backward pass, so an
            # unguarded .zero_() raises AttributeError. Guard it, matching
            # the pattern used by the sibling test_oracle implementations.
            if self.w.grad is not None:
                self.w.grad.zero_()
Пример #4
0
def get_objective(args, hparams):
    """Instantiate the objective selected by ``args.obj`` / ``args.opt``.

    Args:
        args: namespace carrying at least ``obj`` (objective name), ``opt``
            (optimizer name) and ``mu`` (regularization coefficient, only
            printed here).
        hparams: hyper-parameters forwarded to the objective constructor.

    Returns:
        The constructed objective instance.

    Raises:
        ValueError: if the objective is unknown, or the optimizer is not
            supported for the chosen objective.
    """
    print('Reg Coefficient mu: \t {}'.format(args.mu))

    # Shared message for every objective/optimizer mismatch below.
    # (Previously these were bare ``raise ValueError`` with no context.)
    bad_opt = 'Optimizer {opt} is not supported for objective {obj}'

    if args.obj == 'svm':
        if args.opt in ('sgd', 'gd'):
            obj = SVM_SubGradient(hparams)
        elif args.opt == 'bcfw':
            obj = SVM_ConditionalGradient(hparams)
        else:
            raise ValueError(bad_opt.format(opt=args.opt, obj=args.obj))

    elif args.obj == 'logistic':
        if args.opt in ('sgd', 'gd'):
            obj = Logistic_Gradient(hparams)
        else:
            raise ValueError(bad_opt.format(opt=args.opt, obj=args.obj))

    elif args.obj == 'ridge':
        if args.opt in ('sgd', 'gd'):
            obj = Ridge_Gradient(hparams)
        elif args.opt == 'closed-form':
            obj = Ridge_ClosedForm(hparams)
        else:
            raise ValueError(bad_opt.format(opt=args.opt, obj=args.obj))

    elif args.obj == 'lasso':
        if args.opt in ('sgd', 'gd'):
            obj = Lasso_subGradient(hparams)
        else:
            raise ValueError(bad_opt.format(opt=args.opt, obj=args.obj))

    elif args.obj == 'smooth-lasso':
        if args.opt in ('sgd', 'gd'):
            obj = SmoothedLasso_Gradient(hparams)
        else:
            raise ValueError(bad_opt.format(opt=args.opt, obj=args.obj))

    else:
        raise ValueError(
            'Did not recognize objective {obj}'.format(obj=args.obj))

    return obj
Пример #5
0
 def setUp(self):
     """Build the shared lasso fixture and the smoothed-lasso objective.

     ``_init_lasso`` populates attributes on ``self`` (at minimum
     ``self.hparams``, read on the next line); the objective under test is
     then constructed from those hyper-parameters.
     """
     _init_lasso(self)
     self.obj = SmoothedLasso_Gradient(self.hparams)