Example #1
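This test builds two Caffe2 nets that compute the same function, (a + b) * (c + d), in two different ways and uses NetGradientChecker.CompareNets to confirm that their outputs and their gradients with respect to a, b, c, and d agree. The snippet is a method of a test class and assumes numpy is imported as np, core comes from caffe2.python, and NetGradientChecker comes from caffe2.python.gradient_checker.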
    def test_net_comparison(self):
        # (a + b) * (c + d) == a * c + a * d + b * c + b * d
        net1 = core.Net("net1")
        a, b, c, d = net1.AddExternalInputs("a", "b", "c", "d")
        a_b = net1.Sum([a, b], "a+b")
        c_d = net1.Sum([c, d], "c+d")
        x = net1.Mul([a_b, c_d], "x")

        net2 = core.Net("net2")
        ac = net2.Mul([a, c], "ac")
        ad = net2.Mul([a, d], "ad")
        bc = net2.Mul([b, c], "bc")
        bd = net2.Mul([b, d], "bd")
        y = net2.Sum([ac, ad, bc, bd], "y")

        input_values = {
            blob: np.array([i], dtype=np.float32)
            for i, blob in enumerate([a, b, c, d])
        }

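        # Both nets should produce matching outputs and matching gradients
        # with respect to a, b, c, and d for the given input values.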
        NetGradientChecker.CompareNets(
            [net1, net2],
            [[x], [y]],
            [0],
            inputs_with_grads=[a, b, c, d],
            input_values=input_values,
        )
Example #2
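This test builds a small model with brew.fc and two SquaredL2Distance/AveragedLoss heads, runs the parameter initialization net so the FC weights exist, and then calls NetGradientChecker.Check to verify the gradient of both losses with respect to the input blob X. It assumes the same imports as above, plus brew, model_helper, and workspace from caffe2.python.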
    def test_net_gradient_checker(self):
        model = model_helper.ModelHelper(name="test")
        const = model.net.AddExternalInputs("const1", "const2")
        fc = brew.fc(model, dim_in=3, dim_out=4, blob_in="X", blob_out="Y", axis=0)
        dist = [model.net.SquaredL2Distance([fc, c]) for c in const]
        losses = [model.net.AveragedLoss(d) for d in dist]  # using two losses here

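        # Run the init net so the FC weight and bias blobs are created and filled
        # before the gradient check executes model.net.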
        workspace.RunNetOnce(model.param_init_net)
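        # Check the gradient flowing from both losses back to the input "X"
        # for the given input values.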
        NetGradientChecker.Check(
            model.net,
            outputs_with_grad=losses,
            input_values={"X": np.array([1, 2, 3], dtype="float32"),
                          const[0]: np.array([1, 1, 1, 1], dtype="float32"),
                          const[1]: np.array([2, 2, 2, 2], dtype="float32")},
            input_to_check="X",
        )