def test_make_linear_constraints(self, cuda=False):
    """Check that _make_linear_constraints builds scipy-style constraint dicts.

    Verifies the key set, constraint type, count, and the values/gradients of
    the first and last equality constraints, for both float dtypes; then
    confirms that eq=False produces a single "ineq" constraint.
    """
    device = torch.device("cuda") if cuda else torch.device("cpu")
    indices = torch.tensor([1, 2], dtype=torch.long, device=device)
    shapeX = torch.Size([3, 2, 4])
    total = shapeX.numel()
    for dtype in (torch.float, torch.double):
        coeffs = torch.tensor([1.0, 2.0], dtype=dtype, device=device)
        cons = _make_linear_constraints(
            indices=indices,
            coefficients=coeffs,
            rhs=1.0,
            shapeX=shapeX,
            eq=True,
        )
        # every constraint dict must expose exactly the scipy-expected keys
        expected_keys = {"fun", "jac", "type"}
        self.assertTrue(all(set(c.keys()) == expected_keys for c in cons))
        self.assertTrue(all(c["type"] == "eq" for c in cons))
        # one constraint per (batch, q) combination
        self.assertEqual(len(cons), shapeX[:-1].numel())
        x = np.random.rand(total)
        self.assertEqual(cons[0]["fun"](x), x[1] + 2 * x[2] - 1.0)
        expected_jac = np.zeros(total)
        expected_jac[[1, 2]] = [1, 2]
        self.assertTrue(np.allclose(cons[0]["jac"](x), expected_jac))
        self.assertEqual(cons[-1]["fun"](x), x[-3] + 2 * x[-2] - 1.0)
        expected_jac = np.zeros(total)
        expected_jac[[-3, -2]] = [1, 2]
        self.assertTrue(np.allclose(cons[-1]["jac"](x), expected_jac))
    # check inequality type
    ineq_cons = _make_linear_constraints(
        indices=torch.tensor([1]),
        coefficients=torch.tensor([1.0]),
        rhs=1.0,
        shapeX=torch.Size([1, 1, 2]),
        eq=False,
    )
    self.assertEqual(len(ineq_cons), 1)
    self.assertEqual(ineq_cons[0]["type"], "ineq")
 def test_make_linear_constraints(self, cuda=False):
     device = torch.device("cuda") if cuda else torch.device("cpu")
     indices = torch.tensor([1, 2], dtype=torch.long, device=device)
     shapeX = torch.Size([3, 2, 4])
     for dtype in (torch.float, torch.double):
         coefficients = torch.tensor([1.0, 2.0], dtype=dtype, device=device)
         constraints = _make_linear_constraints(
             indices=indices,
             coefficients=coefficients,
             rhs=1.0,
             shapeX=shapeX,
             eq=True,
         )
         self.assertTrue(
             all(set(c.keys()) == {"fun", "jac", "type"} for c in constraints)
         )
         self.assertTrue(all(c["type"] == "eq" for c in constraints))
         self.assertEqual(len(constraints), shapeX[:-1].numel())
         x = np.random.rand(shapeX.numel())
         self.assertEqual(constraints[0]["fun"](x), x[1] + 2 * x[2] - 1.0)
         jac_exp = np.zeros(shapeX.numel())
         jac_exp[[1, 2]] = [1, 2]
         self.assertTrue(np.allclose(constraints[0]["jac"](x), jac_exp))
         self.assertEqual(constraints[-1]["fun"](x), x[-3] + 2 * x[-2] - 1.0)
         jac_exp = np.zeros(shapeX.numel())
         jac_exp[[-3, -2]] = [1, 2]
         self.assertTrue(np.allclose(constraints[-1]["jac"](x), jac_exp))
     # check inequality type
     lcs = _make_linear_constraints(
         indices=torch.tensor([1]),
         coefficients=torch.tensor([1.0]),
         rhs=1.0,
         shapeX=torch.Size([1, 1, 2]),
         eq=False,
     )
     self.assertEqual(len(lcs), 1)
     self.assertEqual(lcs[0]["type"], "ineq")
# Example no. 3
    def test_make_linear_constraints(self):
        """Constraint dicts for per-element and cross-q-batch index specs.

        Covers: equality constraints from 1-d indices (schema, type, count,
        fun/jac values at a random point); the eq=False inequality path;
        2-d indices spanning the q-batch dimension; and the ValueError
        raised for a zero-dimensional index tensor.
        """
        indices = torch.tensor([1, 2], dtype=torch.long, device=self.device)
        shapeX = torch.Size([3, 2, 4])
        numel = shapeX.numel()
        for dtype in (torch.float, torch.double):
            coefficients = torch.tensor(
                [1.0, 2.0], dtype=dtype, device=self.device
            )
            constraints = _make_linear_constraints(
                indices=indices,
                coefficients=coefficients,
                rhs=1.0,
                shapeX=shapeX,
                eq=True,
            )
            key_set = {"fun", "jac", "type"}
            self.assertTrue(
                all(set(c.keys()) == key_set for c in constraints)
            )
            self.assertTrue(all(c["type"] == "eq" for c in constraints))
            self.assertEqual(len(constraints), shapeX[:-1].numel())
            x = np.random.rand(numel)
            self.assertEqual(constraints[0]["fun"](x), x[1] + 2 * x[2] - 1.0)
            exp_jac = np.zeros(numel)
            exp_jac[[1, 2]] = [1, 2]
            self.assertTrue(np.allclose(constraints[0]["jac"](x), exp_jac))
            self.assertEqual(
                constraints[-1]["fun"](x), x[-3] + 2 * x[-2] - 1.0
            )
            exp_jac = np.zeros(numel)
            exp_jac[[-3, -2]] = [1, 2]
            self.assertTrue(np.allclose(constraints[-1]["jac"](x), exp_jac))
        # check inequality type
        lcs = _make_linear_constraints(
            indices=torch.tensor([1]),
            coefficients=torch.tensor([1.0]),
            rhs=1.0,
            shapeX=torch.Size([1, 1, 2]),
            eq=False,
        )
        self.assertEqual(len(lcs), 1)
        self.assertEqual(lcs[0]["type"], "ineq")

        # check constraint across q-batch
        indices = torch.tensor(
            [[0, 3], [1, 2]], dtype=torch.long, device=self.device
        )
        shapeX = torch.Size([3, 2, 4])
        for dtype in (torch.float, torch.double):
            coefficients = torch.tensor(
                [1.0, 2.0], dtype=dtype, device=self.device
            )
            constraints = _make_linear_constraints(
                indices=indices,
                coefficients=coefficients,
                rhs=1.0,
                shapeX=shapeX,
                eq=True,
            )
            key_set = {"fun", "jac", "type"}
            self.assertTrue(
                all(set(c.keys()) == key_set for c in constraints)
            )
            self.assertTrue(all(c["type"] == "eq" for c in constraints))
            # 2-d indices span the q dimension -> one constraint per t-batch
            self.assertEqual(len(constraints), shapeX[0])
            x = np.random.rand(shapeX.numel())
            offsets = [shapeX[i:].numel() for i in range(1, len(shapeX))]
            # flat position of [i, j, k] is i * offsets[0] + j * offsets[1] + k
            for i in range(shapeX[0]):
                p1 = i * offsets[0] + 3
                p2 = i * offsets[0] + 1 * offsets[1] + 2
                self.assertEqual(
                    constraints[i]["fun"](x), x[p1] + 2 * x[p2] - 1.0
                )
                exp_jac = np.zeros(shapeX.numel())
                exp_jac[[p1, p2]] = [1, 2]
                self.assertTrue(
                    np.allclose(constraints[i]["jac"](x), exp_jac)
                )
        # make sure error is raised for scalar tensors
        with self.assertRaises(ValueError):
            _make_linear_constraints(
                indices=torch.tensor(0),
                coefficients=torch.tensor([1.0]),
                rhs=1.0,
                shapeX=torch.Size([1, 1, 2]),
                eq=False,
            )