Example #1
def test_input_elementwise_sum_node():
    for s in [(),
              (3, 4, 5)]:
        network = tn.ContainerNode(
            "all",
            [tn.InputElementwiseSumNode("ies"),
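             # "ies" sums every value routed to it below via SendToNode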
             tn.SequentialNode(
                 "seq1",
                [tn.InputNode("i1", shape=s),
                 tn.SendToNode("st1", reference="ies", to_key="in1")]),
             tn.SequentialNode(
                 "seq2",
                 [tn.InputNode("i2", shape=s),
                  tn.SendToNode("st2", reference="ies", to_key="in2")]),
             tn.SequentialNode(
                 "seq3",
                 [tn.InputNode("i3", shape=s),
                  tn.SendToNode("st3", reference="ies", to_key="in3")])]
        ).network()
        fn = network.function(["i1", "i2", "i3"], ["ies"])
        i1 = np.array(np.random.rand(*s), dtype=fX)
        i2 = np.array(np.random.rand(*s), dtype=fX)
        i3 = np.array(np.random.rand(*s), dtype=fX)
        np.testing.assert_allclose(i1 + i2 + i3,
                                   fn(i1, i2, i3)[0],
                                   rtol=1e-5)
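
These test snippets are excerpted from the treeano test suite and omit their module-level setup. A minimal sketch of the preamble they assume (the usual treeano test-file convention; treat it as an assumption, since it is not shown in the excerpts):

import numpy as np
import theano
import theano.tensor as T

import treeano
import treeano.nodes as tn

fX = theano.config.floatX

Later examples additionally reference sandbox modules (auxiliary_costs, cp, sp) and the canopy package; their imports are omitted here because the exact module paths depend on the repository layout.
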
Example #2
def test_auxiliary_cost_node():
    network = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode("seq", [
            tn.InputNode("x", shape=(3, 4, 5)),
            tn.AuxiliaryCostNode(
                "cost1", {"target": tn.InputNode("y1", shape=(3, 4, 5))}),
            tn.AddConstantNode("a1", value=2),
            tn.AuxiliaryCostNode(
                "cost2", {"target": tn.InputNode("y2", shape=(3, 4, 5))}),
            tn.MultiplyConstantNode("m1", value=2),
            tn.AuxiliaryCostNode(
                "cost3", {"target": tn.InputNode("y3", shape=(3, 4, 5))}),
            tn.ConstantNode("const", value=0),
            tn.InputElementwiseSumNode("cost")
        ]),
        cost_reference="cost",
        cost_function=treeano.utils.squared_error,
    ).network()
    fn = network.function(["x", "y1", "y2", "y3"], ["cost"])
    x = np.random.rand(3, 4, 5).astype(fX)
    ys = [np.random.rand(3, 4, 5).astype(fX) for _ in range(3)]

    def mse(x, y):
        return ((x - y)**2).mean()

    expected_output = (mse(x, ys[0]) + mse(x + 2, ys[1]) +
                       mse(2 * (x + 2), ys[2]))
    np.testing.assert_allclose(fn(x, *ys)[0], expected_output, rtol=1e-5)
Example #3
def test_auxiliary_dense_softmax_cce_node():
    network = tn.SequentialNode("seq", [
        tn.InputNode("in", shape=(3, 5)),
        auxiliary_costs.AuxiliaryDenseSoftmaxCCENode(
            "aux",
            {"target": tn.ConstantNode("target", value=np.eye(3).astype(fX))},
            num_units=3,
            cost_reference="foo"),
        tn.IdentityNode("i"),
        tn.InputElementwiseSumNode("foo", ignore_default_input=True)
    ]).network()
    x = np.random.randn(3, 5).astype(fX)
    fn = network.function(["in"], ["i", "foo", "aux_dense"])
    res = fn(x)
    np.testing.assert_equal(res[0], x)
    loss = T.nnet.categorical_crossentropy(
        np.ones((3, 3), dtype=fX) / 3.0,
        np.eye(3).astype(fX),
    ).mean().eval()
    np.testing.assert_allclose(res[1], loss)
Example #4
def test_auxiliary_contraction_penalty_node():
    # testing that both contraction penalty versions return the same thing
    network = tn.SequentialNode(
        "s",
        [tn.InputNode("i", shape=(10, 3)),
         cp.AuxiliaryContractionPenaltyNode(
             "acp",
             tn.DenseNode("d", num_units=9),
             cost_reference="sum"),
         cp.ElementwiseContractionPenaltyNode("cp", input_reference="i"),
         tn.AggregatorNode("a"),
         # zero out rest of network, so that the value of "sum" is just the
         # value from the auxiliary contraction penalty node
         tn.ConstantNode("foo", value=0),
         tn.InputElementwiseSumNode("sum")]
    ).network()
    fn = network.function(["i"], ["sum", "a"])
    x = np.random.rand(10, 3).astype(fX)
    res = fn(x)
    np.testing.assert_equal(res[0], res[1])
Example #5
def test_auxiliary_kl_sparsity_penalty_node():
    # testing that both sparsity penalty versions return the same thing
    network = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "s",
            [
                tn.InputNode("i", shape=(10, 3)),
                tn.DenseNode("d", num_units=9),
                sp.AuxiliaryKLSparsityPenaltyNode("scp", cost_reference="sum"),
                sp.ElementwiseKLSparsityPenaltyNode("sp"),
                tn.AggregatorNode("a"),
                # zero out rest of network, so that the value of "sum" is just
                # the value from the auxiliary sparsity penalty node
                tn.ConstantNode("foo", value=0),
                tn.InputElementwiseSumNode("sum")
            ]),
        sparsity=0.1,
    ).network()
    fn = network.function(["i"], ["sum", "a"])
    x = np.random.rand(10, 3).astype(fX)
    res = fn(x)
    np.testing.assert_equal(res[0], res[1])
Example #6
             tn.SequentialNode(
                 "cp_seq2",
                 [tn.DenseNode("fc2"),
                   # the cost has NaNs when using ReLUs
                  # TODO look into why
                  tn.AbsNode("abs2")]),
             cost_weight=1e1),
         tn.DropoutNode("do2"),
         tn.DenseNode("fc3", num_units=10),
         tn.SoftmaxNode("pred"),
         tn.TotalCostNode(
             "cost",
             {"pred": tn.IdentityNode("pred_id"),
              "target": tn.InputNode("y", shape=(None,), dtype="int32")},
             cost_function=treeano.utils.categorical_crossentropy_i32),
         tn.InputElementwiseSumNode("total_cost")]),
    num_units=32,
    cost_reference="total_cost",
    dropout_probability=0.5,
    inits=[treeano.inits.XavierNormalInit()],
)

with_updates = tn.HyperparameterNode(
    "with_updates",
    tn.AdamNode(
        "adam",
        {"subtree": model,
         "cost": tn.ReferenceNode("cost_ref", reference="total_cost")}),
)
network = with_updates.network()
network.build()  # build eagerly to share weights
Example #7
    dropout_probability=0.5,
    inits=[treeano.inits.XavierNormalInit()],
)

model = tn.L2PenaltyNode(
    "l2_cost",
    model,
    l2_weight=0.0001,
)

with_updates = tn.HyperparameterNode(
    "with_updates",
    tn.AdamNode(
        "adam",
        {"subtree": model,
         "cost": tn.InputElementwiseSumNode("cost")}),
    cost_function=treeano.utils.categorical_crossentropy_i32,
    cost_reference="cost",
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500

valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(dropout_probability=0),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
Example #8
def test_input_elementwise_sum_node_serialization():
    tn.check_serialization(tn.InputElementwiseSumNode("a"))