# Exemplo n.º 1
# 0
def test_submodule_reuse():
    """A Dense layer reused (via reuse=) by two nets keeps identical parameters
    in both, and parameters_from can override a single submodule's slot."""
    x = np.zeros((1, 2))

    shared = Dense(5)
    first = Sequential(shared, Dense(2))
    second = Sequential(shared, Dense(3))

    shared_params = shared.init_parameters(x, key=PRNGKey(0))
    first_params = first.init_parameters(x,
                                         key=PRNGKey(1),
                                         reuse={shared: shared_params})
    second_params = second.init_parameters(x,
                                           key=PRNGKey(2),
                                           reuse={shared: shared_params})

    # Output widths follow each net's final Dense layer.
    assert first.apply(first_params, x).shape == (1, 2)
    assert second.apply(second_params, x).shape == (1, 3)

    # The reused layer's parameters are embedded unchanged in both nets.
    assert_dense_parameters_equal(shared_params, first_params[0])
    assert_dense_parameters_equal(shared_params, second_params[0])

    # parameters_from lets freshly initialized parameters replace one slot
    # while the rest of the net keeps its existing parameters.
    fresh_params = shared.init_parameters(x, key=PRNGKey(3))
    combined = first.parameters_from(
        {
            first: first_params,
            shared: fresh_params
        }, x)
    assert_dense_parameters_equal(fresh_params, combined.dense0)
    assert_dense_parameters_equal(first_params.dense1, combined.dense1)
# Exemplo n.º 2
# 0
def test_no_reuse():
    """Sharing a layer object without reuse= yields independent parameters."""
    x = np.zeros((1, 2))

    shared = Dense(5)
    first = Sequential(shared, Dense(2))
    first_params = first.init_parameters(x, key=PRNGKey(0))

    second = Sequential(shared, Dense(3))
    second_params = second.init_parameters(x, key=PRNGKey(1))

    # Same layer spec gives matching shapes ...
    assert first_params[0].kernel.shape == second_params[0].kernel.shape
    assert first_params[0].bias.shape == second_params[0].bias.shape
    # ... but different keys give independently initialized values.
    assert not np.array_equal(first_params[0][0], second_params[0][0])
    assert not np.array_equal(first_params[0][1], second_params[0][1])
# Exemplo n.º 3
# 0
def test_ocr_rnn():
    """Stacked GRU RNN with zero-initialized weights: the softmax head must
    output a uniform distribution over the classes at every time step."""
    length = 5
    carry_size = 3
    class_count = 4
    x = np.zeros((1, length, 4))

    def make_rnn():
        return Rnn(*GRUCell(carry_size, zeros))

    net = Sequential(
        make_rnn(),
        make_rnn(),
        make_rnn(),
        # Fold the time axis into the batch axis so a single Dense layer
        # serves every time step with the same weights:
        lambda h: np.reshape(h, (-1, carry_size)),
        Dense(class_count, zeros, zeros),
        softmax,
        lambda h: np.reshape(h, (-1, length, class_count)))

    params = net.init_parameters(PRNGKey(0), x)

    assert len(params) == 4
    cell = params.rnn0.gru_cell
    assert len(cell) == 3
    # The zeros initializer leaves all three GRU kernels at zero.
    for kernel in (cell.update_kernel, cell.reset_kernel, cell.compute_kernel):
        assert np.array_equal(np.zeros((7, 3)), kernel)

    # Zero logits -> softmax gives 1/4 for each of the 4 classes.
    out = net.apply(params, x)
    assert np.array_equal(.25 * np.ones((1, 5, 4)), out)
# Exemplo n.º 4
# 0
def test_submodule_reuse():
    """Parameters passed via reuse= appear verbatim inside both enclosing nets."""
    x = np.zeros((1, 2))

    shared = Dense(5)
    first = Sequential(shared, Dense(2))
    second = Sequential(shared, Dense(3))

    shared_params = shared.init_parameters(PRNGKey(0), x)
    first_params = first.init_parameters(PRNGKey(1), x, reuse={shared: shared_params})
    second_params = second.init_parameters(PRNGKey(2), x, reuse={shared: shared_params})

    # Output widths follow each net's final Dense layer.
    assert first.apply(first_params, x).shape == (1, 2)
    assert second.apply(second_params, x).shape == (1, 3)

    # Both nets embed the shared layer's parameters unchanged.
    assert_dense_params_equal(shared_params, first_params[0])
    assert_dense_params_equal(shared_params, second_params[0])
# Exemplo n.º 5
# 0
def test_regularized_submodule():
    """An L2Regularized submodule initializes and applies inside a Sequential."""
    net = Sequential(Conv(2, (1, 1)), relu, Conv(2, (1, 1)), relu, flatten,
                     L2Regularized(Sequential(Dense(2), relu, Dense(2), np.sum), .1))

    x = np.ones((1, 3, 3, 1))
    params = net.init_parameters(x, key=PRNGKey(0))
    assert params.regularized.model.dense1.kernel.shape == (2, 2)

    # np.sum at the tail (plus the scalar regularization loss) -> 0-d output.
    assert net.apply(params, x).shape == ()
# Exemplo n.º 6
# 0
def test_reparametrized_submodule():
    """A Reparametrized (Scaled) submodule works inside a Sequential net."""
    net = Sequential(
        Conv(2, (3, 3)), relu, Conv(2, (3, 3)), relu, flatten,
        Reparametrized(Sequential(Dense(2), relu, Dense(2)), Scaled))

    x = np.ones((1, 3, 3, 1))
    params = net.init_parameters(PRNGKey(0), x)
    # Inner model's parameters are reachable through the reparametrized wrapper.
    assert params.reparametrized.model.dense1.kernel.shape == (2, 2)

    assert net.apply(params, x).shape == (1, 2)
# Exemplo n.º 7
# 0
def test_reuse_api():
    """Transfer learning: pretrained parameters transplant via reuse=."""
    x = np.zeros((1, 2))
    pretrained = Dense(5)
    pretrained_params = pretrained.init_parameters(x, key=PRNGKey(0))

    # train net params...

    transfer_net = Sequential(pretrained, relu, Dense(2))
    transfer_params = transfer_net.init_parameters(
        x, key=PRNGKey(1), reuse={pretrained: pretrained_params})

    # The first slot of the transfer net holds the pretrained parameters.
    assert pretrained_params == transfer_params.dense0
# Exemplo n.º 8
# 0
def test_input_dependent_nested_modules():
    """A parametrized module may size its submodules from the input it sees."""
    @parametrized
    def layer(inputs):
        # Output width follows the batch dimension of the incoming array.
        return Dense(inputs.shape[0])(inputs)

    net = Sequential(Dense(3), layer)

    x = np.zeros((5, 3))
    params = net.init_parameters(x, key=PRNGKey(0))

    # Batch of 5 -> inner Dense gets width 5 -> (5, 5) output.
    assert net.apply(params, x).shape == (5, 5)
# Exemplo n.º 9
# 0
def test_pool_shape(Pool):
    """Conv + pooling: stride-2 pooling over a 5x5 map yields a 3x3 map."""
    conv = Conv(2, filter_shape=(3, 3), padding='SAME',
                kernel_init=zeros, bias_init=zeros)
    x = np.zeros((1, 5, 5, 2))

    net = Sequential(conv, Pool(window_shape=(1, 1), strides=(2, 2)))
    params = net.init_parameters(PRNGKey(0), x)
    # Zero kernel and bias -> the pooled output is all zeros of shape (1,3,3,2).
    assert np.array_equal(np.zeros((1, 3, 3, 2)), net.apply(params, x))
# Exemplo n.º 10
# 0
def test_external_param_sharing():
    """The same layer object used twice in one net shares one parameter set."""
    dense = Dense(2, zeros, zeros)
    net = Sequential(dense, dense)

    x = np.zeros((1, 2))
    params = net.init_parameters(x, key=PRNGKey(0))
    # Only a single (kernel, bias) pair exists despite two occurrences.
    assert_parameters_equal(((np.zeros((2, 2)), np.zeros(2)), ), params)

    expected = np.zeros((1, 2))
    assert np.array_equal(expected, net.apply(params, x))
    # jit-compiled application gives the same result.
    assert np.array_equal(expected, net.apply(params, x, jit=True))
# Exemplo n.º 11
# 0
def test_collection_input(type):
    """Modules accept collection-typed inputs, bare and wrapped in Sequential."""
    @parametrized
    def net(inputs):
        assert isinstance(inputs, type)
        return inputs[0] * inputs[1] * parameter((), zeros)

    collection = type((np.zeros(2), np.zeros(2)))

    # Check the bare module first, then the same module inside a Sequential.
    for model in (net, Sequential(net)):
        params = model.init_parameters(collection, key=PRNGKey(0))
        assert np.array_equal(np.zeros(2), model.apply(params, collection))
# Exemplo n.º 12
# 0
def test_external_sequential_submodule():
    """Nested Sequential submodules get correctly named and shaped parameters."""
    net = Sequential(Conv(4, (2, 2)), flatten, relu, Dense(3), relu,
                     Dense(2), Sequential(Dense(2), relu))
    x = np.zeros((1, 5, 5, 2))

    params = net.init_parameters(x, key=PRNGKey(0))
    assert params.conv.bias.shape == (4, )
    assert params.dense0.bias.shape == (3, )
    assert params.dense1.kernel.shape == (3, 2)
    assert params.dense1.bias.shape == (2, )
    # The nested Sequential's Dense is addressed through its parent's name.
    assert params.sequential.dense.bias.shape == (2, )

    out = net.apply(params, x)
    assert out.shape == (1, 2)

    # jit-compiled application matches the eager result.
    assert np.allclose(out, net.apply(params, x, jit=True))
# Exemplo n.º 13
# 0
def test_parameters_from_shared_submodules():
    """parameters_from / apply_from resolve a submodule shared by two branches,
    and every shaped/unshaped, jitted/unjitted apply_from variant agrees."""
    shared = Dense(2)
    a = Sequential(shared, relu)
    b = Sequential(shared, np.sum)

    @parametrized
    def net(inputs):
        return a(inputs) * b(inputs)

    x = np.zeros((1, 3))
    a_params = a.init_parameters(x, key=PRNGKey(0))
    out = a.apply(a_params, x)

    params = net.parameters_from({a: a_params}, x)
    # Branch a carries the supplied Dense parameters; branch b has none of
    # its own (np.sum is parameterless and the Dense is shared).
    assert_parameters_equal(a_params.dense.kernel,
                            params.sequential0.dense.kernel)
    assert_parameters_equal((), params.sequential1)
    out = net.apply(params, x)

    # All eight apply_from spellings must reproduce the same output.
    for jit in (False, True):
        assert np.array_equal(out, net.apply_from({a: a_params}, x, jit=jit))
        assert np.array_equal(
            out, net.apply_from({a.shaped(x): a_params}, x, jit=jit))
        assert np.array_equal(
            out, net.shaped(x).apply_from({a: a_params}, jit=jit))
        assert np.array_equal(
            out, net.shaped(x).apply_from({a.shaped(x): a_params}, jit=jit))
# Exemplo n.º 14
# 0
def test_parameters_from_subsubmodule():
    """parameters_from reaches a module nested two levels deep."""
    inner = Dense(2)
    mid = Sequential(inner, relu)
    net = Sequential(mid, np.sum)
    x = np.zeros((1, 3))
    params = net.init_parameters(x, key=PRNGKey(0))
    out = net.apply(params, x)

    inner_params = inner.init_parameters(x, key=PRNGKey(0))

    # Supplying only the innermost module's parameters fills that slot.
    params_ = net.parameters_from({inner: inner_params}, x)
    assert_dense_parameters_equal(inner_params, params_[0][0])
    assert out.shape == net.apply(params_, x).shape

    # apply_from with and without jit matches the output shape.
    assert out.shape == net.apply_from({inner: inner_params}, x).shape
    assert out.shape == net.apply_from({inner: inner_params}, x, jit=True).shape
# Exemplo n.º 15
# 0
def test_params_from_shared_submodules2():
    """parameters_from copies a shared submodule's parameters into each branch."""
    shared = Dense(2)
    a = Sequential(shared, relu)
    b = Sequential(shared, np.sum)

    @parametrized
    def net(inputs):
        return a(inputs), b(inputs)

    x = np.zeros((1, 3))
    a_params = a.init_parameters(PRNGKey(0), x)
    out = a.apply(a_params, x)

    params = net.parameters_from({a: a_params}, x)
    # Both branches received identical Dense parameters.
    assert_dense_params_equal(a_params.dense, params.sequential0.dense)
    assert_dense_params_equal(a_params.dense, params.sequential1.dense)
    # TODO parameters are duplicated, optimization with weight sharing is wrong:
    # TODO instead: assert 1 == len(params)
    out_, _ = net.apply(params, x)
    assert np.array_equal(out, out_)
# Exemplo n.º 16
# 0
def test_ocr_rnn():
    """Stacked GRU net with zero init, then one RmsProp step (eager and jitted)
    on a cross-entropy objective built around the net."""
    length = 5
    carry_size = 3
    class_count = 4
    x = jnp.zeros((1, length, 4))

    def make_rnn():
        return Rnn(*GRUCell(carry_size, zeros))

    net = Sequential(
        make_rnn(),
        make_rnn(),
        make_rnn(),
        # Fold the time axis into the batch axis so a single Dense layer
        # serves every time step with the same weights:
        lambda h: jnp.reshape(h, (-1, carry_size)),
        Dense(class_count, zeros, zeros),
        softmax,
        lambda h: jnp.reshape(h, (-1, length, class_count)))

    params = net.init_parameters(x, key=PRNGKey(0))

    assert len(params) == 4
    cell = params.rnn0.gru_cell
    assert len(cell) == 3
    # The zeros initializer leaves all three GRU kernels at zero.
    for kernel in (cell.update_kernel, cell.reset_kernel, cell.compute_kernel):
        assert jnp.array_equal(jnp.zeros((7, 3)), kernel)

    out = net.apply(params, x)

    @parametrized
    def cross_entropy(images, targets):
        prediction = net(images)
        return jnp.mean(-jnp.sum(targets * jnp.log(prediction), (1, 2)))

    # One optimizer step, eagerly and jit-compiled, must both run through.
    opt = optimizers.RmsProp(0.003)
    state = opt.init(cross_entropy.init_parameters(x, out, key=PRNGKey(0)))
    state = opt.update(cross_entropy.apply, state, x, out)
    opt.update(cross_entropy.apply, state, x, out, jit=True)
# Exemplo n.º 17
# 0
def test_parametrized_jit_parameter_sharing():
    """jit-wrapping a module must not break parameter sharing with itself."""
    dense = Dense(3)
    net = Sequential(dense, jit(dense))
    x = np.zeros((2, 3))
    params = net.init_parameters(x, key=PRNGKey(0))
    # Plain and jitted occurrences resolve to one shared parameter set.
    assert len(params) == 1
    net.apply(params, x)