def test_save_and_load_params():
    params = Dense(2).init_parameters(PRNGKey(0), np.zeros((1, 2)))

    from pathlib import Path
    path = Path('/') / 'tmp' / 'net.params'
    save_params(params, path)
    params_ = load_params(path)

    assert_dense_params_equal(params, params_)
def test_submodule_reuse_top_level():
    net = Dense(2)
    inputs = np.zeros((1, 3))
    params = net.init_parameters(PRNGKey(0), inputs)
    out = net.apply(params, inputs)

    params_ = net.init_parameters(PRNGKey(1), inputs, reuse={net: params})
    assert_dense_params_equal(params, params_)

    out_ = net.apply(params_, inputs)
    assert np.array_equal(out, out_)
def test_params_from_top_level():
    net = Dense(2)
    inputs = np.zeros((1, 3))
    params = net.init_parameters(PRNGKey(0), inputs)
    out = net.apply(params, inputs)

    params_ = net.parameters_from({net: params}, inputs)
    assert_dense_params_equal(params, params_)

    out_ = net.apply(params_, inputs)
    assert np.array_equal(out, out_)

    out_ = net.apply_from({net: params}, inputs)
    assert np.array_equal(out, out_)

    out_ = net.apply_from({net: params}, inputs, jit=True)
    assert np.array_equal(out, out_)
def test_submodule_reuse():
    inputs = np.zeros((1, 2))

    layer = Dense(5)
    net1 = Sequential(layer, Dense(2))
    net2 = Sequential(layer, Dense(3))

    layer_params = layer.init_parameters(PRNGKey(0), inputs)
    net1_params = net1.init_parameters(PRNGKey(1), inputs, reuse={layer: layer_params})
    net2_params = net2.init_parameters(PRNGKey(2), inputs, reuse={layer: layer_params})

    out1 = net1.apply(net1_params, inputs)
    assert out1.shape == (1, 2)

    out2 = net2.apply(net2_params, inputs)
    assert out2.shape == (1, 3)

    assert_dense_params_equal(layer_params, net1_params[0])
    assert_dense_params_equal(layer_params, net2_params[0])
def test_params_from_shared_submodules2():
    sublayer = Dense(2)
    a = Sequential(sublayer, relu)
    b = Sequential(sublayer, np.sum)

    @parametrized
    def net(inputs):
        return a(inputs), b(inputs)

    inputs = np.zeros((1, 3))
    a_params = a.init_parameters(PRNGKey(0), inputs)
    out = a.apply(a_params, inputs)

    params = net.parameters_from({a: a_params}, inputs)
    assert_dense_params_equal(a_params.dense, params.sequential0.dense)
    assert_dense_params_equal(a_params.dense, params.sequential1.dense)
    # TODO parameters are duplicated, optimization with weight sharing is wrong:
    # TODO instead: assert 1 == len(params)
    out_, _ = net.apply(params, inputs)
    assert np.array_equal(out, out_)
def test_params_from_subsubmodule():
    subsublayer = Dense(2)
    sublayer = Sequential(subsublayer, relu)
    net = Sequential(sublayer, np.sum)
    inputs = np.zeros((1, 3))
    params = net.init_parameters(PRNGKey(0), inputs)
    out = net.apply(params, inputs)

    subsublayer_params = subsublayer.init_parameters(PRNGKey(0), inputs)

    params_ = net.parameters_from({subsublayer: subsublayer_params}, inputs)
    assert_dense_params_equal(subsublayer_params, params_[0][0])
    out_ = net.apply(params_, inputs)
    assert out.shape == out_.shape

    out_ = net.apply_from({subsublayer: subsublayer_params}, inputs)
    assert out.shape == out_.shape

    out_ = net.apply_from({subsublayer: subsublayer_params}, inputs, jit=True)
    assert out.shape == out_.shape