Exemplo n.º 1
0
def test_to_preallocated_init1():
    """A preallocated init should make two networks share the same weights."""
    # source network that owns the weights
    network1 = tn.SequentialNode(
        "seq",
        [tn.InputNode("i", shape=(3, 4, 5)),
         tn.LinearMappingNode(
             "lm",
             output_dim=15,
             inits=[treeano.inits.NormalWeightInit(15.0)])]
    ).network()
    # second network initialized from network1's preallocated storage
    preallocated = canopy.network_utils.to_preallocated_init(network1)
    network2 = tn.SequentialNode(
        "seq",
        [tn.InputNode("i", shape=(3, 4, 5)),
         tn.LinearMappingNode(
             "lm",
             output_dim=15,
             inits=[preallocated])]
    ).network()

    shared1 = canopy.network_utils.to_shared_dict(network1)
    shared2 = canopy.network_utils.to_shared_dict(network2)
    w1 = list(shared1.values())[0]
    w2 = list(shared2.values())[0]
    # both networks should be using the exact same shared variables
    assert w1 is w2

    x = np.random.randn(3, 4, 5).astype(fX)
    fn1 = network1.function(["i"], ["lm"])
    fn2 = network2.function(["i"], ["lm"])
    np.testing.assert_equal(fn1(x),
                            fn2(x))
Exemplo n.º 2
0
def test_to_shared_dict_relative_network():
    """A relative (sub-)network should only expose its own shared variables."""
    network = tn.SequentialNode(
        "seq",
        [tn.InputNode("i", shape=(10,)),
         tn.LinearMappingNode("lm1", output_dim=15),
         tn.LinearMappingNode("lm2", output_dim=15)]
    ).network()

    def shared_keys(net):
        # iterating the dict yields its variable-name keys
        return set(canopy.network_utils.to_shared_dict(net))

    nt.assert_equal({"lm1:weight", "lm2:weight"}, shared_keys(network))
    nt.assert_equal({"lm1:weight"}, shared_keys(network["lm1"]))
    nt.assert_equal({"lm2:weight"}, shared_keys(network["lm2"]))
Exemplo n.º 3
0
def test_transform_root_node():
    """An identity root transform should produce a behaviorally equal network."""
    network1 = tn.toy.ConstantUpdaterNode(
        "cun",
        tn.SequentialNode("seq", [
            tn.InputNode("i", shape=(3, 4, 5)),
            tn.LinearMappingNode("lm",
                                 output_dim=15,
                                 inits=[treeano.inits.NormalWeightInit(15.0)])
        ]),
        value=-0.1,
    ).network()
    # build eagerly so weights exist before the transform copies them
    network1.build()

    # identity transform: the result should stay in sync with the original
    network2 = canopy.transforms.transform_root_node(network1, fn=lambda x: x)

    fn1 = network1.function(["i"], ["lm"])
    fn2 = network2.function(["i"], ["lm"])
    fn1u = network1.function(["i"], ["lm"], include_updates=True)
    fn2u = network2.function(["i"], ["lm"], include_updates=True)
    x = np.random.randn(3, 4, 5).astype(fX)

    def check_in_sync():
        np.testing.assert_equal(fn1(x), fn2(x))

    check_in_sync()
    fn1u(x)  # update through the original network
    check_in_sync()
    fn2u(x)  # update through the transformed network
    check_in_sync()
Exemplo n.º 4
0
def test_load_value_dict_not_strict_keys():
    """Mismatched key sets should only raise when strict_keys is True."""
    n1 = tn.SequentialNode(
        "seq",
        [tn.InputNode("i", shape=(10, 100)),
         tn.LinearMappingNode(
             "lm",
             output_dim=15,
             inits=[treeano.inits.NormalWeightInit()])]
    ).network()
    # a network with no parameters at all
    n2 = tn.InputNode("i", shape=()).network()

    def load_values(dst, src, strict_keys):
        canopy.network_utils.load_value_dict(
            dst,
            canopy.network_utils.to_value_dict(src),
            strict_keys=strict_keys)

    def test1(strict_keys):
        load_values(n1, n2, strict_keys)

    def test2(strict_keys):
        load_values(n2, n1, strict_keys)

    # strict: key mismatch in either direction must fail
    nt.raises(AssertionError)(test1)(strict_keys=True)
    nt.raises(AssertionError)(test2)(strict_keys=True)
    # lenient: the same loads succeed
    test1(strict_keys=False)
    test2(strict_keys=False)
Exemplo n.º 5
0
def test_override_hyperparameters2():
    """Overriding an unused hyperparameter must not change function output."""
    network = tn.toy.ConstantUpdaterNode(
        "cun",
        tn.SequentialNode("seq", [
            tn.InputNode("i", shape=(3, 4, 5)),
            tn.LinearMappingNode("lm",
                                 output_dim=15,
                                 inits=[treeano.inits.NormalWeightInit(15.0)])
        ]),
        value=-0.1,
    ).network()

    fn1 = network.function(["i"], ["lm"])
    fn1u = network.function(["i"], ["lm"], include_updates=True)
    handlers = [canopy.handlers.override_hyperparameters(value=2)]
    inputs = {"x": "i"}
    outputs = {"out": "lm"}
    fn2 = canopy.handlers.handled_fn(network, handlers, inputs, outputs)
    fn2u = canopy.handlers.handled_fn(network, handlers, inputs, outputs,
                                      include_updates=True)

    x = np.random.randn(3, 4, 5).astype(fX)

    def check_same():
        np.testing.assert_equal(fn1(x)[0], fn2({"x": x})["out"])

    check_same()
    fn1u(x)  # update via the plain function
    check_same()
    fn2u({"x": x})  # update via the handled function
    check_same()
Exemplo n.º 6
0
 def get_shapes(output_dim):
     """Return (weight shape, output shape) for a linear map over (3, 4, 5) input."""
     net = tn.SequentialNode("s", [
         tn.InputNode("in", shape=(3, 4, 5)),
         tn.LinearMappingNode("linear", output_dim=output_dim),
     ]).network()
     w_shape = net["linear"].get_vw("weight").shape
     out_shape = net["s"].get_vw("default").shape
     return w_shape, out_shape
Exemplo n.º 7
0
 def new_network():
     """Build a fresh input -> linear-mapping network for tests."""
     input_node = tn.InputNode("i", shape=(10, 100))
     linear_node = tn.LinearMappingNode(
         "lm",
         output_dim=15,
         inits=[treeano.inits.NormalWeightInit()])
     return tn.SequentialNode("seq", [input_node, linear_node]).network()
Exemplo n.º 8
0
def test_to_value_dict():
    """to_value_dict should expose parameter values keyed by variable name."""
    network = tn.SequentialNode(
        "seq",
        [tn.InputNode("i", shape=(10,)),
         tn.LinearMappingNode(
             "lm",
             output_dim=15,
             inits=[treeano.inits.ConstantInit(42.42)])]
    ).network()
    value_dict = canopy.network_utils.to_value_dict(network)
    # the only parameter is the linear mapping's weight
    nt.assert_equal(["lm:weight"], list(value_dict.keys()))
    expected = 42.42 * np.ones((10, 15), dtype=fX)
    np.testing.assert_equal(expected, value_dict["lm:weight"])
Exemplo n.º 9
0
def test_to_preallocated_init2():
    # test that networks are kept in sync even when updating
    def make_network(lm_inits, update_value):
        # identical topology; only the inits and the constant update differ
        return tn.toy.ConstantUpdaterNode(
            "cun",
            tn.SequentialNode(
                "seq",
                [tn.InputNode("i", shape=(3, 4, 5)),
                 tn.LinearMappingNode(
                     "lm",
                     output_dim=15,
                     inits=lm_inits)]),
            value=update_value,
        ).network()

    network1 = make_network([treeano.inits.NormalWeightInit(15.0)], -0.1)
    # network2 shares network1's preallocated weight storage
    network2 = make_network(
        [canopy.network_utils.to_preallocated_init(network1)], 0.4)

    fn1 = network1.function(["i"], ["lm"])
    fn2 = network2.function(["i"], ["lm"])
    fn1u = network1.function(["i"], ["lm"], include_updates=True)
    fn2u = network2.function(["i"], ["lm"], include_updates=True)
    x = np.random.randn(3, 4, 5).astype(fX)

    def check_in_sync():
        np.testing.assert_equal(fn1(x), fn2(x))

    check_in_sync()
    fn1u(x)  # update through network1 should be visible in network2
    check_in_sync()
    fn2u(x)  # and vice versa
    check_in_sync()
Exemplo n.º 10
0
def test_linear_mapping_node():
    """LinearMappingNode should compute x.dot(W) over the last axis."""
    network = tn.SequentialNode("s", [
        tn.InputNode("in", shape=(3, 4, 5)),
        tn.LinearMappingNode("linear", output_dim=6),
    ]).network()
    weight_vw = network["linear"].get_vw("weight")
    fn = network.function(["in"], ["s"])
    x = np.random.randn(3, 4, 5).astype(fX)
    # weight starts at zero, so the output starts at zero
    np.testing.assert_allclose(fn(x)[0], np.zeros((3, 4, 6)))
    # install a random weight matrix and check the mapping matches np.dot
    W = np.random.randn(5, 6).astype(fX)
    weight_vw.value = W
    np.testing.assert_allclose(np.dot(x, W), fn(x)[0], rtol=1e-4, atol=1e-7)
Exemplo n.º 11
0
def test_pickle_unpickle_network():
    """A network round-tripped through disk should compute the same outputs."""
    temp_dir = tempfile.mkdtemp()
    dirname = os.path.join(temp_dir, "network")
    try:
        n1 = tn.SequentialNode("seq", [
            tn.InputNode("i", shape=(10, 100)),
            tn.LinearMappingNode(
                "lm", output_dim=15, inits=[treeano.inits.NormalWeightInit()])
        ]).network()

        x = np.random.randn(10, 100).astype(fX)
        fn1 = n1.function(["i"], ["lm"])
        # round-trip through serialization
        canopy.serialization.pickle_network(n1, dirname)
        n2 = canopy.serialization.unpickle_network(dirname)
        fn2 = n2.function(["i"], ["lm"])
        np.testing.assert_equal(fn1(x), fn2(x))
    finally:
        # always clean up the temp directory, even on failure
        shutil.rmtree(temp_dir)
Exemplo n.º 12
0
def test_monitor_update_ratio_node():
    """Only parameters with updates (the linear weight) should be monitored."""
    network = tn.WeightDecayNode(
        "decay",
        monitor_update_ratio.MonitorUpdateRatioNode(
            "mur",
            tn.SequentialNode(
                "s",
                [tn.InputNode("i", shape=(None, 3)),
                 tn.LinearMappingNode("linear", output_dim=10),
                 tn.AddBiasNode("bias")])),
        weight_decay=1
    ).network()
    network.build()
    monitored = network["mur"].find_vws_in_subtree(tags={"monitor"})
    assert len(monitored) == 1
    vw = monitored[0]
    # the single monitored variable is a 2-norm ratio for the linear
    # weight; the bias must not contribute one
    assert re.match(".*_2-norm$", vw.name)
    assert re.match(".*linear.*", vw.name)
    assert not re.match(".*bias.*", vw.name)
Exemplo n.º 13
0
def test_transform_root_node_postwalk():
    """Postwalk should visit children before parents and not mutate the input."""
    network1 = tn.toy.ConstantUpdaterNode(
        "cun",
        tn.SequentialNode("seq", [
            tn.InputNode("i", shape=(3, 4, 5)),
            tn.LinearMappingNode("lm",
                                 output_dim=15,
                                 inits=[treeano.inits.NormalWeightInit(15.0)])
        ]),
        value=-0.1,
    ).network()

    def postwalk_names(network):
        # walk the tree with an identity transform, recording visit order
        seen = []

        def record(node):
            seen.append(node.name)
            return node

        canopy.transforms.transform_root_node_postwalk(network, record)
        return seen

    nt.assert_equal(postwalk_names(network1), ["i", "lm", "seq", "cun"])

    def append_name(node):
        # NOTE: assumes NodeImpl subclass
        node = treeano.node_utils.copy_node(node)
        node._name += "_1"
        return node

    network2 = canopy.transforms.transform_root_node_postwalk(
        network1, append_name)

    nt.assert_equal(postwalk_names(network2),
                    ["i_1", "lm_1", "seq_1", "cun_1"])

    # the original network must be left unmodified
    nt.assert_equal(postwalk_names(network1), ["i", "lm", "seq", "cun"])
Exemplo n.º 14
0
    for _ in range(batch_size):
        i, o = binary_toy_data(lag, length)
        inputs.append(i)
        outputs.append(o)
    return np.array(inputs)[..., np.newaxis], np.array(outputs)[..., np.newaxis]


# ############################## prepare model ##############################

# Model: x -> GRU -> linear readout -> bias -> sigmoid, wrapped in a
# HyperparameterNode that supplies shared settings to the whole subtree.
model = tn.HyperparameterNode(
    "model",
    tn.SequentialNode(
        "seq",
        [tn.InputNode("x", shape=(None, None, 1)),  # (batch, time, 1)
         recurrent_hc.GRUNode("gru1"),
         tn.LinearMappingNode("y_linear", output_dim=1),
         # broadcast the bias across batch and time axes
         tn.AddBiasNode("y_bias", broadcastable_axes=(0, 1)),
         tn.SigmoidNode("sigmoid"),
         ]),
    inits=[treeano.inits.OrthogonalInit()],
    num_units=HIDDEN_STATE_SIZE,  # hidden size consumed by the GRU node
    learn_init=True,  # NOTE(review): presumably makes the GRU's initial state trainable — confirm
    grad_clip=1,
)

with_updates = tn.HyperparameterNode(
    "with_updates",
    tn.AdamNode(
        "adam",
        {"subtree": model,
         "cost": tn.TotalCostNode("cost", {
Exemplo n.º 15
0
def test_linear_mapping_node_serialization():
    """LinearMappingNode should round-trip through serialization."""
    for node in (tn.LinearMappingNode("a"),
                 tn.LinearMappingNode("a", output_dim=3)):
        tn.check_serialization(node)