# Collected test snippets for InterpolatedLazyVariable from the (Variable-era) gpytorch
# test suite. The imports below are assumed for completeness; exact module paths may
# differ between gpytorch versions.
import torch
from torch.autograd import Variable

import gpytorch
from gpytorch.lazy import (InterpolatedLazyVariable, KroneckerProductLazyVariable,
                           NonLazyVariable, ToeplitzLazyVariable)
from gpytorch.utils import approx_equal


def pending_test_inv_matmul():
    left_interp_indices = Variable(torch.LongTensor([[2, 3], [3, 4], [4, 5]]))
    left_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1], [1, 3]]))
    right_interp_indices = Variable(torch.LongTensor([[2, 3], [3, 4], [4, 5]]))
    right_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1], [1, 3]]))

    base_lazy_variable_mat = torch.randn(6, 6)
    base_lazy_variable_mat = base_lazy_variable_mat.t().matmul(base_lazy_variable_mat)
    base_lazy_variable = NonLazyVariable(Variable(base_lazy_variable_mat))
    test_matrix = torch.randn(3, 4)

    interp_lazy_var = InterpolatedLazyVariable(base_lazy_variable, left_interp_indices, left_interp_values,
                                               right_interp_indices, right_interp_values)
    res = interp_lazy_var.inv_matmul(Variable(test_matrix)).data

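    # Dense 3 x 6 interpolation matrices equivalent to the (indices, values) pairs above:
    # row i places values[i][k] at column indices[i][k].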
    left_matrix = torch.Tensor([
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
    ])
    right_matrix = torch.Tensor([
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
    ])
    actual_mat = Variable(left_matrix.matmul(base_lazy_variable_mat).matmul(right_matrix.t()))
    actual = gpytorch.inv_matmul(actual_mat, Variable(test_matrix)).data
    assert approx_equal(res, actual)

def test_exact_posterior():
    train_mean = Variable(torch.randn(4))
    train_y = Variable(torch.randn(4))
    test_mean = Variable(torch.randn(4))

    # Test case
    c1_var = Variable(torch.Tensor([5, 1, 2, 0]), requires_grad=True)
    c2_var = Variable(torch.Tensor([6, 0, 1, -1]), requires_grad=True)
    indices = Variable(torch.arange(0, 4).long().view(4, 1))
    values = Variable(torch.ones(4).view(4, 1))
    toeplitz_1 = InterpolatedLazyVariable(ToeplitzLazyVariable(c1_var),
                                          indices, values, indices, values)
    toeplitz_2 = InterpolatedLazyVariable(ToeplitzLazyVariable(c2_var),
                                          indices, values, indices, values)
    sum_lv = toeplitz_1 + toeplitz_2

    # Actual case
    actual = sum_lv.evaluate()

    # Test forward
    actual_alpha = gpytorch.posterior_strategy(actual).exact_posterior_alpha(
        train_mean, train_y)
    actual_mean = gpytorch.posterior_strategy(actual).exact_posterior_mean(
        test_mean, actual_alpha)
    sum_lv_alpha = sum_lv.posterior_strategy().exact_posterior_alpha(
        train_mean, train_y)
    sum_lv_mean = sum_lv.posterior_strategy().exact_posterior_mean(
        test_mean, sum_lv_alpha)
    assert (torch.norm(actual_mean.data - sum_lv_mean.data) < 1e-4)

    def test_batch_diag(self):
        left_interp_indices = Variable(
            torch.LongTensor([[2, 3], [3, 4], [4, 5]]).repeat(5, 1, 1))
        left_interp_values = Variable(
            torch.Tensor([[1, 1], [1, 1], [1, 1]]).repeat(5, 1, 1))
        right_interp_indices = Variable(
            torch.LongTensor([[0, 1], [1, 2], [2, 3]]).repeat(5, 1, 1))
        right_interp_values = Variable(
            torch.Tensor([[1, 1], [1, 1], [1, 1]]).repeat(5, 1, 1))

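        # Batch of 5 random symmetric positive semi-definite 6 x 6 base matrices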
        base_lazy_variable_mat = torch.randn(5, 6, 6)
        base_lazy_variable_mat = base_lazy_variable_mat.transpose(
            1, 2).matmul(base_lazy_variable_mat)

        base_lazy_variable = NonLazyVariable(
            Variable(base_lazy_variable_mat, requires_grad=True))
        interp_lazy_var = InterpolatedLazyVariable(base_lazy_variable,
                                                   left_interp_indices,
                                                   left_interp_values,
                                                   right_interp_indices,
                                                   right_interp_values)

        actual = interp_lazy_var.evaluate()
        actual_diag = torch.stack([
            actual[0].diag(), actual[1].diag(), actual[2].diag(),
            actual[3].diag(), actual[4].diag()
        ])

        self.assertTrue(
            approx_equal(actual_diag.data,
                         interp_lazy_var.diag().data))

def test_derivatives():
    left_interp_indices = Variable(torch.LongTensor([[2, 3], [3, 4], [4, 5]])).repeat(5, 3, 1)
    left_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1], [1, 3]])).repeat(5, 3, 1)
    right_interp_indices = Variable(torch.LongTensor([[2, 3], [3, 4], [4, 5]])).repeat(5, 3, 1)
    right_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1], [1, 3]])).repeat(5, 3, 1)

    base_lazy_variable_mat = torch.randn(5, 6, 6)
    base_lazy_variable_mat = base_lazy_variable_mat.transpose(1, 2).matmul(base_lazy_variable_mat)
    test_matrix = Variable(torch.randn(1, 9, 4))

    base_lazy_variable = NonLazyVariable(Variable(base_lazy_variable_mat, requires_grad=True))
    interp_lazy_var = InterpolatedLazyVariable(base_lazy_variable, left_interp_indices, left_interp_values,
                                               right_interp_indices, right_interp_values)
    res = interp_lazy_var.matmul(test_matrix)
    res.sum().backward()

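    # Reference computation: the same W K W^T product built from dense interpolation
    # matrices, used to check that gradients w.r.t. the base matrix agree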
    base_lazy_variable2 = Variable(base_lazy_variable_mat, requires_grad=True)
    left_matrix = torch.Tensor([
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
    ]).repeat(5, 1, 1)
    actual = Variable(left_matrix).matmul(base_lazy_variable2).matmul(Variable(left_matrix).transpose(-1, -2))
    actual = actual.matmul(test_matrix)
    actual.sum().backward()

    assert approx_equal(base_lazy_variable.var.grad.data, base_lazy_variable2.grad.data)
    def test_diag(self):
        left_interp_indices = Variable(torch.LongTensor([[2, 3], [3, 4], [4, 5]]))
        left_interp_values = Variable(torch.Tensor([[1, 1], [1, 1], [1, 1]]))
        right_interp_indices = Variable(torch.LongTensor([[0, 1], [1, 2], [2, 3]]))
        right_interp_values = Variable(torch.Tensor([[1, 1], [1, 1], [1, 1]]))

        base_lazy_variable_mat = torch.randn(6, 6)
        base_lazy_variable_mat = (
            base_lazy_variable_mat.t().
            matmul(base_lazy_variable_mat)
        )

        base_lazy_variable = NonLazyVariable(
            Variable(base_lazy_variable_mat, requires_grad=True)
        )
        interp_lazy_var = InterpolatedLazyVariable(
            base_lazy_variable,
            left_interp_indices,
            left_interp_values,
            right_interp_indices,
            right_interp_values,
        )

        actual = interp_lazy_var.evaluate()
        self.assertTrue(approx_equal(actual.diag().data, interp_lazy_var.diag().data))

    def test_inv_matmul(self):
        base_lazy_variable_mat = torch.randn(6, 6)
        base_lazy_variable_mat = base_lazy_variable_mat.t().matmul(
            base_lazy_variable_mat)
        test_matrix = torch.randn(3, 4)

        left_interp_indices = Variable(
            torch.LongTensor([[2, 3], [3, 4], [4, 5]]), requires_grad=True)
        left_interp_values = Variable(
            torch.Tensor([[1, 2], [0.5, 1], [1, 3]]), requires_grad=True)
        right_interp_indices = Variable(
            torch.LongTensor([[2, 3], [3, 4], [4, 5]]), requires_grad=True)
        right_interp_values = Variable(
            torch.Tensor([[1, 2], [0.5, 1], [1, 3]]), requires_grad=True)
        left_interp_values_copy = Variable(left_interp_values.data,
                                           requires_grad=True)
        right_interp_values_copy = Variable(right_interp_values.data,
                                            requires_grad=True)

        base_lazy_variable = Variable(base_lazy_variable_mat,
                                      requires_grad=True)
        base_lazy_variable_copy = Variable(base_lazy_variable_mat,
                                           requires_grad=True)
        test_matrix_var = Variable(test_matrix, requires_grad=True)
        test_matrix_var_copy = Variable(test_matrix, requires_grad=True)

        interp_lazy_var = InterpolatedLazyVariable(
            NonLazyVariable(base_lazy_variable),
            left_interp_indices,
            left_interp_values,
            right_interp_indices,
            right_interp_values,
        )
        res = interp_lazy_var.inv_matmul(test_matrix_var)

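        # Scatter the interpolation values into dense 3 x 6 matrices to build the reference system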
        left_matrix = Variable(torch.zeros(3, 6))
        right_matrix = Variable(torch.zeros(3, 6))
        left_matrix.scatter_(1, left_interp_indices, left_interp_values_copy)
        right_matrix.scatter_(1, right_interp_indices,
                              right_interp_values_copy)
        actual_mat = left_matrix.matmul(base_lazy_variable_copy).matmul(
            right_matrix.transpose(-1, -2))
        actual = gpytorch.inv_matmul(actual_mat, test_matrix_var_copy)

        self.assertTrue(approx_equal(res.data, actual.data))

        # Backward pass
        res.sum().backward()
        actual.sum().backward()

        self.assertTrue(
            approx_equal(base_lazy_variable.grad.data,
                         base_lazy_variable_copy.grad.data))
        self.assertTrue(
            approx_equal(left_interp_values.grad.data,
                         left_interp_values_copy.grad.data))
def test_batch_matmul():
    left_interp_indices = Variable(
        torch.LongTensor([[2, 3], [3, 4], [4, 5]]).repeat(5, 3, 1))
    left_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1],
                                                [1, 3]]).repeat(5, 3, 1),
                                  requires_grad=True)
    left_interp_values_copy = Variable(left_interp_values.data,
                                       requires_grad=True)
    right_interp_indices = Variable(
        torch.LongTensor([[0, 1], [1, 2], [2, 3]]).repeat(5, 3, 1))
    right_interp_values = Variable(torch.Tensor([[1, 2], [2, 0.5],
                                                 [1, 3]]).repeat(5, 3, 1),
                                   requires_grad=True)
    right_interp_values_copy = Variable(right_interp_values.data,
                                        requires_grad=True)

    base_lazy_variable_mat = torch.randn(5, 6, 6)
    base_lazy_variable_mat = base_lazy_variable_mat.transpose(
        -1, -2).matmul(base_lazy_variable_mat)
    base_variable = Variable(base_lazy_variable_mat, requires_grad=True)
    base_variable_copy = Variable(base_lazy_variable_mat, requires_grad=True)
    base_lazy_variable = NonLazyVariable(base_variable)

    test_matrix = torch.randn(5, 9, 4)

    interp_lazy_var = InterpolatedLazyVariable(base_lazy_variable,
                                               left_interp_indices,
                                               left_interp_values,
                                               right_interp_indices,
                                               right_interp_values)
    res = interp_lazy_var.matmul(Variable(test_matrix))

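    # Build a dense 9 x 6 interpolation matrix for each batch element via scatter_, then stack them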
    left_matrix_comps = []
    right_matrix_comps = []
    for i in range(5):
        left_matrix_comp = Variable(torch.zeros(9, 6))
        right_matrix_comp = Variable(torch.zeros(9, 6))
        left_matrix_comp.scatter_(1, left_interp_indices[i],
                                  left_interp_values_copy[i])
        right_matrix_comp.scatter_(1, right_interp_indices[i],
                                   right_interp_values_copy[i])
        left_matrix_comps.append(left_matrix_comp.unsqueeze(0))
        right_matrix_comps.append(right_matrix_comp.unsqueeze(0))
    left_matrix = torch.cat(left_matrix_comps)
    right_matrix = torch.cat(right_matrix_comps)

    actual = left_matrix.matmul(base_variable_copy).matmul(
        right_matrix.transpose(-1, -2))
    actual = actual.matmul(Variable(test_matrix))
    assert approx_equal(res.data, actual.data)

    res.sum().backward()
    actual.sum().backward()

    assert approx_equal(base_variable.grad.data, base_variable_copy.grad.data)
    assert approx_equal(left_interp_values.grad.data,
                        left_interp_values_copy.grad.data)

    def test_matmul(self):
        left_interp_indices = Variable(
            torch.LongTensor([[2, 3], [3, 4], [4, 5]]).repeat(3, 1))
        left_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1],
                                                    [1, 3]]).repeat(3, 1),
                                      requires_grad=True)
        left_interp_values_copy = Variable(left_interp_values.data,
                                           requires_grad=True)
        right_interp_indices = Variable(
            torch.LongTensor([[0, 1], [1, 2], [2, 3]]).repeat(3, 1))
        right_interp_values = Variable(torch.Tensor([[1, 2], [2, 0.5],
                                                     [1, 3]]).repeat(3, 1),
                                       requires_grad=True)
        right_interp_values_copy = Variable(right_interp_values.data,
                                            requires_grad=True)

        base_lazy_variable_mat = torch.randn(6, 6)
        base_lazy_variable_mat = (
            base_lazy_variable_mat.t().matmul(base_lazy_variable_mat))
        base_variable = Variable(base_lazy_variable_mat, requires_grad=True)
        base_variable_copy = Variable(base_lazy_variable_mat,
                                      requires_grad=True)
        base_lazy_variable = NonLazyVariable(base_variable)

        test_matrix = torch.randn(9, 4)

        interp_lazy_var = InterpolatedLazyVariable(
            base_lazy_variable,
            left_interp_indices,
            left_interp_values,
            right_interp_indices,
            right_interp_values,
        )
        res = interp_lazy_var.matmul(Variable(test_matrix))

        left_matrix = Variable(torch.zeros(9, 6))
        right_matrix = Variable(torch.zeros(9, 6))
        left_matrix.scatter_(1, left_interp_indices, left_interp_values_copy)
        right_matrix.scatter_(1, right_interp_indices,
                              right_interp_values_copy)

        actual = (left_matrix.matmul(base_variable_copy).matmul(
            right_matrix.t()).matmul(Variable(test_matrix)))
        self.assertTrue(approx_equal(res.data, actual.data))

        res.sum().backward()
        actual.sum().backward()

        self.assertTrue(
            approx_equal(base_variable.grad.data,
                         base_variable_copy.grad.data))
        self.assertTrue(
            approx_equal(left_interp_values.grad.data,
                         left_interp_values_copy.grad.data))

    def test_matmul_batch(self):
        left_interp_indices = Variable(
            torch.LongTensor([[2, 3], [3, 4], [4, 5]])).repeat(5, 3, 1)
        left_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1],
                                                    [1, 3]])).repeat(5, 3, 1)
        right_interp_indices = Variable(
            torch.LongTensor([[0, 1], [1, 2], [2, 3]])).repeat(5, 3, 1)
        right_interp_values = Variable(torch.Tensor([[1, 2], [2, 0.5],
                                                     [1, 3]])).repeat(5, 3, 1)

        base_lazy_variable_mat = torch.randn(5, 6, 6)
        base_lazy_variable_mat = (base_lazy_variable_mat.transpose(
            1, 2).matmul(base_lazy_variable_mat))
        test_matrix = Variable(torch.randn(1, 9, 4))

        base_lazy_variable = NonLazyVariable(
            Variable(base_lazy_variable_mat, requires_grad=True))
        interp_lazy_var = InterpolatedLazyVariable(
            base_lazy_variable,
            left_interp_indices,
            left_interp_values,
            right_interp_indices,
            right_interp_values,
        )
        res = interp_lazy_var.matmul(test_matrix)

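        # Hard-coded dense interpolation matrices (the scatter of the indices/values above),
        # repeated across the batch of 5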
        left_matrix = torch.Tensor([
            [0, 0, 1, 2, 0, 0],
            [0, 0, 0, 0.5, 1, 0],
            [0, 0, 0, 0, 1, 3],
            [0, 0, 1, 2, 0, 0],
            [0, 0, 0, 0.5, 1, 0],
            [0, 0, 0, 0, 1, 3],
            [0, 0, 1, 2, 0, 0],
            [0, 0, 0, 0.5, 1, 0],
            [0, 0, 0, 0, 1, 3],
        ]).repeat(5, 1, 1)

        right_matrix = torch.Tensor([
            [1, 2, 0, 0, 0, 0],
            [0, 2, 0.5, 0, 0, 0],
            [0, 0, 1, 3, 0, 0],
            [1, 2, 0, 0, 0, 0],
            [0, 2, 0.5, 0, 0, 0],
            [0, 0, 1, 3, 0, 0],
            [1, 2, 0, 0, 0, 0],
            [0, 2, 0.5, 0, 0, 0],
            [0, 0, 1, 3, 0, 0],
        ]).repeat(5, 1, 1)
        actual = (left_matrix.matmul(base_lazy_variable_mat).matmul(
            right_matrix.transpose(-1, -2)).matmul(test_matrix.data))

        self.assertTrue(approx_equal(res.data, actual))
def test_exact_posterior():
    train_mean = Variable(torch.randn(4))
    train_y = Variable(torch.randn(4))
    test_mean = Variable(torch.randn(4))

    # Test case
    c1_var = Variable(torch.Tensor([5, 1, 2, 0]), requires_grad=True)
    c2_var = Variable(torch.Tensor([[6, 0], [1, -1]]), requires_grad=True)
    c3_var = Variable(torch.Tensor([7, 2, 1, 0]), requires_grad=True)
    indices_1 = torch.arange(0, 4).long().view(4, 1)
    values_1 = torch.ones(4).view(4, 1)
    indices_2 = torch.arange(0, 2).expand(4, 2).long().view(2, 4, 1)
    values_2 = torch.ones(8).view(2, 4, 1)
    indices_3 = torch.arange(0, 4).long().view(4, 1)
    values_3 = torch.ones(4).view(4, 1)
    toeplitz_1 = InterpolatedLazyVariable(ToeplitzLazyVariable(c1_var),
                                          Variable(indices_1),
                                          Variable(values_1),
                                          Variable(indices_1),
                                          Variable(values_1))
    kronecker_product = KroneckerProductLazyVariable(c2_var, indices_2,
                                                     values_2, indices_2,
                                                     values_2)
    toeplitz_2 = InterpolatedLazyVariable(ToeplitzLazyVariable(c3_var),
                                          Variable(indices_3),
                                          Variable(values_3),
                                          Variable(indices_3),
                                          Variable(values_3))
    mul_lv = toeplitz_1 * kronecker_product * toeplitz_2

    # Actual case
    actual = mul_lv.evaluate()
    # Test forward
    actual_alpha = gpytorch.posterior_strategy(actual).exact_posterior_alpha(
        train_mean, train_y)
    actual_mean = gpytorch.posterior_strategy(actual).exact_posterior_mean(
        test_mean, actual_alpha)
    mul_lv_alpha = mul_lv.posterior_strategy().exact_posterior_alpha(
        train_mean, train_y)
    mul_lv_mean = mul_lv.posterior_strategy().exact_posterior_mean(
        test_mean, mul_lv_alpha)
    assert (torch.norm(actual_mean.data - mul_lv_mean.data) < 1e-3)

def test_matmul():
    left_interp_indices = Variable(torch.LongTensor([[2, 3], [3, 4], [4, 5]])).repeat(3, 1)
    left_interp_values = Variable(torch.Tensor([[1, 2], [0.5, 1], [1, 3]])).repeat(3, 1)
    right_interp_indices = Variable(torch.LongTensor([[0, 1], [1, 2], [2, 3]])).repeat(3, 1)
    right_interp_values = Variable(torch.Tensor([[1, 2], [2, 0.5], [1, 3]])).repeat(3, 1)

    base_lazy_variable_mat = torch.randn(6, 6)
    base_lazy_variable_mat = base_lazy_variable_mat.t().matmul(base_lazy_variable_mat)
    base_lazy_variable = NonLazyVariable(Variable(base_lazy_variable_mat))

    test_matrix = torch.ones(9)

    interp_lazy_var = InterpolatedLazyVariable(base_lazy_variable, left_interp_indices, left_interp_values,
                                               right_interp_indices, right_interp_values)
    res = interp_lazy_var.matmul(Variable(test_matrix)).data

    left_matrix = torch.Tensor([
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
        [0, 0, 1, 2, 0, 0],
        [0, 0, 0, 0.5, 1, 0],
        [0, 0, 0, 0, 1, 3],
    ])
    right_matrix = torch.Tensor([
        [1, 2, 0, 0, 0, 0],
        [0, 2, 0.5, 0, 0, 0],
        [0, 0, 1, 3, 0, 0],
        [1, 2, 0, 0, 0, 0],
        [0, 2, 0.5, 0, 0, 0],
        [0, 0, 1, 3, 0, 0],
        [1, 2, 0, 0, 0, 0],
        [0, 2, 0.5, 0, 0, 0],
        [0, 0, 1, 3, 0, 0],
    ])
    actual = left_matrix.matmul(base_lazy_variable_mat).matmul(right_matrix.t()).matmul(test_matrix)
    assert approx_equal(res, actual)

    def test_getitem_batch(self):
        left_interp_indices = Variable(
            torch.LongTensor([[2, 3], [3, 4], [4, 5]]).repeat(5, 1, 1))
        left_interp_values = Variable(
            torch.Tensor([[1, 1], [1, 1], [1, 1]]).repeat(5, 1, 1))
        right_interp_indices = Variable(
            torch.LongTensor([[0, 1], [1, 2], [2, 3]]).repeat(5, 1, 1))
        right_interp_values = Variable(
            torch.Tensor([[1, 1], [1, 1], [1, 1]]).repeat(5, 1, 1))

        base_lazy_variable_mat = torch.randn(5, 6, 6)
        base_lazy_variable_mat = (base_lazy_variable_mat.transpose(
            1, 2).matmul(base_lazy_variable_mat))

        base_lazy_variable = NonLazyVariable(
            Variable(base_lazy_variable_mat, requires_grad=True))
        interp_lazy_var = InterpolatedLazyVariable(
            base_lazy_variable,
            left_interp_indices,
            left_interp_values,
            right_interp_indices,
            right_interp_values,
        )

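        # With all-ones interp values, row i of W_left picks base columns {i + 2, i + 3} and
        # row j of W_right picks {j, j + 1}, so W_left K W_right^T is the sum of these four
        # shifted slices of the base matrix.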
        actual = (base_lazy_variable[:, 2:5, 0:3] +
                  base_lazy_variable[:, 2:5, 1:4] +
                  base_lazy_variable[:, 3:6, 0:3] +
                  base_lazy_variable[:, 3:6, 1:4]).evaluate()

        self.assertTrue(
            approx_equal(interp_lazy_var[2].evaluate().data, actual[2].data))
        self.assertTrue(
            approx_equal(interp_lazy_var[0:2].evaluate().data,
                         actual[0:2].data))
        self.assertTrue(
            approx_equal(interp_lazy_var[:, 2:3].evaluate().data,
                         actual[:, 2:3].data))
        self.assertTrue(
            approx_equal(interp_lazy_var[:, 0:2].evaluate().data,
                         actual[:, 0:2].data))
        self.assertTrue(
            approx_equal(interp_lazy_var[1, :1, :2].evaluate().data,
                         actual[1, :1, :2].data))
        self.assertTrue(
            approx_equal(interp_lazy_var[1, 1, :2].data, actual[1, 1, :2].data))
        self.assertTrue(
            approx_equal(interp_lazy_var[1, :1, 2].data, actual[1, :1, 2].data))

    def test_inv_matmul_batch(self):
        base_lazy_variable_mat = torch.randn(6, 6)
        base_lazy_variable_mat = ((base_lazy_variable_mat.t().matmul(
            base_lazy_variable_mat)).unsqueeze(0).repeat(5, 1, 1))
        test_matrix = torch.randn(5, 3, 4)

        left_interp_indices = Variable(torch.LongTensor(
            [[2, 3], [3, 4], [4, 5]]).unsqueeze(0).repeat(5, 1, 1),
                                       requires_grad=True)
        left_interp_values = Variable(torch.Tensor(
            [[1, 2], [0.5, 1], [1, 3]]).unsqueeze(0).repeat(5, 1, 1),
                                      requires_grad=True)
        right_interp_indices = Variable(torch.LongTensor(
            [[2, 3], [3, 4], [4, 5]]).unsqueeze(0).repeat(5, 1, 1),
                                        requires_grad=True)
        right_interp_values = Variable(torch.Tensor(
            [[1, 2], [0.5, 1], [1, 3]]).unsqueeze(0).repeat(5, 1, 1),
                                       requires_grad=True)
        left_interp_values_copy = Variable(left_interp_values.data,
                                           requires_grad=True)
        right_interp_values_copy = Variable(right_interp_values.data,
                                            requires_grad=True)

        base_lazy_variable = Variable(base_lazy_variable_mat,
                                      requires_grad=True)
        base_lazy_variable_copy = Variable(base_lazy_variable_mat,
                                           requires_grad=True)
        test_matrix_var = Variable(test_matrix, requires_grad=True)
        test_matrix_var_copy = Variable(test_matrix, requires_grad=True)

        interp_lazy_var = InterpolatedLazyVariable(
            NonLazyVariable(base_lazy_variable),
            left_interp_indices,
            left_interp_values,
            right_interp_indices,
            right_interp_values,
        )
        res = interp_lazy_var.inv_matmul(test_matrix_var)

        left_matrix_comps = []
        right_matrix_comps = []
        for i in range(5):
            left_matrix_comp = Variable(torch.zeros(3, 6))
            right_matrix_comp = Variable(torch.zeros(3, 6))
            left_matrix_comp.scatter_(1, left_interp_indices[i],
                                      left_interp_values_copy[i])
            right_matrix_comp.scatter_(1, right_interp_indices[i],
                                       right_interp_values_copy[i])
            left_matrix_comps.append(left_matrix_comp.unsqueeze(0))
            right_matrix_comps.append(right_matrix_comp.unsqueeze(0))
        left_matrix = torch.cat(left_matrix_comps)
        right_matrix = torch.cat(right_matrix_comps)
        actual_mat = left_matrix.matmul(base_lazy_variable_copy).matmul(
            right_matrix.transpose(-1, -2))
        actual = gpytorch.inv_matmul(actual_mat, test_matrix_var_copy)

        self.assertTrue(approx_equal(res.data, actual.data))

        # Backward pass
        res.sum().backward()
        actual.sum().backward()

        self.assertTrue(
            approx_equal(base_lazy_variable.grad.data,
                         base_lazy_variable_copy.grad.data))
        self.assertTrue(
            approx_equal(left_interp_values.grad.data,
                         left_interp_values_copy.grad.data))