Example No. 1
from timeit import default_timer as timer

from thinc.api import Config, fix_random_seed, registry, require_gpu

# CONFIG, set_backend and get_dummy_data are helpers defined elsewhere in the original script.


def main(jax: bool = False, pytorch: bool = False, gpu_id: int = -1):
    global CONFIG
    fix_random_seed(0)
    if gpu_id >= 0:
        require_gpu(gpu_id)
        print("Set GPU", gpu_id)
    backends = {"jax": jax, "pytorch": pytorch}
    for name, use_backend in backends.items():
        if not use_backend:
            print(f"Skipping {name}")
            continue
        set_backend(name, gpu_id)
        # Build the model and the data settings from the registered config.
        C = registry.make_from_config(Config().from_str(CONFIG))
        model = C["model"]
        X, Y = get_dummy_data(**C["data"])
        print("Copy to device")
        X = [model.ops.asarray(x) for x in X]
        Y = [model.ops.asarray(y) for y in Y]
        print("Begin init", len(X))
        model.initialize(X=X[:5])
        print("Pre-batch")
        n_words = sum(len(x) for x in X)
        # Run the first layer over minibatches up front, then drop it from the
        # model so only the remaining layers are benchmarked.
        X = [model.layers[0].predict(batch) for batch in model.ops.minibatch(16, X)]
        model.layers.pop(0)
        print("Start")
        start_time = timer()
        # The timed forward pass over the pre-batched inputs is not shown in this excerpt.
        end_time = timer()
        print(name, n_words, end_time - start_time)
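
The module-level CONFIG string the benchmark reads is not shown. A minimal, hypothetical sketch with the two sections the code resolves, a [model] built from registered layers and a [data] section of keyword arguments for the local get_dummy_data helper; the layer names and numbers below are placeholders chosen so the model has a first layer to pre-compute and pop:

# Hypothetical config string; for illustration only.
CONFIG = """
[model]
@layers = "chain.v1"

[model.*.embed]
@layers = "Embed.v1"
nO = 128
nV = 10000

[model.*.encode]
@layers = "Relu.v1"
nO = 128
nI = 128

[data]
n_samples = 1000
n_tags = 20
"""
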
Example No. 2
def check_transform(transform, in_data, out_data):
    # Build the transform layer by name through the registry.
    model = registry.make_from_config({"config": {"@layers": transform}})["config"]
    # get_data_checker is a helper from the test module (see the sketch below).
    input_checker = get_data_checker(in_data)
    output_checker = get_data_checker(out_data)
    model.initialize(in_data, out_data)
    Y, backprop = model(in_data, is_train=True)
    output_checker(Y, out_data)
    # Y doubles as a dummy gradient: it has the right type and shape for backprop.
    dX = backprop(Y)
    input_checker(dX, in_data)
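
get_data_checker is not shown in this example. A minimal, hypothetical stand-in, assuming the test data consists of numpy arrays, lists of arrays, or thinc's Ragged/Padded containers, could look like this:

import numpy
from thinc.types import Padded, Ragged


def get_data_checker(expected):
    # Hypothetical helper: returns a function that asserts a value has the
    # same container type and array shapes as the reference data. The outer
    # argument mirrors the call pattern above and isn't otherwise needed here.
    def check(value, reference):
        assert type(value) == type(reference)
        if isinstance(reference, numpy.ndarray):
            assert value.shape == reference.shape
        elif isinstance(reference, list):
            assert len(value) == len(reference)
            for v, r in zip(value, reference):
                assert v.shape == r.shape
        elif isinstance(reference, (Ragged, Padded)):
            assert value.data.shape == reference.data.shape

    return check
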
Example No. 3
def test_layers_with_residual(name, kwargs, in_data, out_data):
    # Wrap the layer under test in a residual connection, fill in any missing
    # defaults, then resolve the config into a model.
    cfg = {"@layers": "residual.v1", "layer": {"@layers": name, **kwargs}}
    filled = registry.fill_config({"config": cfg})
    model = registry.make_from_config(filled)["config"]
    if "LSTM" in name:
        # LSTM layers work on Padded sequences, so adapt the model to the test data.
        model = with_padded(model)
    model.initialize(in_data, out_data)
    Y, backprop = model(in_data, is_train=True)
    assert_data_match(Y, out_data)
    dX = backprop(Y)
    assert_data_match(dX, in_data)
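
The arguments come from the test's parametrization, which is not shown. A hypothetical example call, using Relu.v1 with matching input and output widths (a residual connection needs the input and output dimensions to agree so X + layer(X) is well-defined); the layer name, sizes and dummy arrays are placeholders:

import numpy

name = "Relu.v1"
kwargs = {"nO": 4, "nI": 4}
in_data = numpy.zeros((8, 4), dtype="f")
out_data = numpy.zeros((8, 4), dtype="f")
test_layers_with_residual(name, kwargs, in_data, out_data)
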
Example No. 4
def test_optimizer_schedules_from_config(schedule_valid):
    # The fixture supplies a schedule plus the first three values it should yield.
    lr, lr_next1, lr_next2, lr_next3 = schedule_valid
    cfg = {"@optimizers": "Adam.v1", "learn_rate": lr}
    optimizer = registry.make_from_config({"cfg": cfg})["cfg"]
    assert optimizer.learn_rate == lr_next1
    # step_schedules() advances every scheduled hyperparameter by one step.
    optimizer.step_schedules()
    assert optimizer.learn_rate == lr_next2
    optimizer.step_schedules()
    assert optimizer.learn_rate == lr_next3
    # A scheduled hyperparameter can still be overridden with a plain value.
    optimizer.learn_rate = 1.0
    assert optimizer.learn_rate == 1.0
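
The schedule_valid fixture is not shown. A minimal, hypothetical version, assuming the optimizer accepts a generator as a scheduled learn_rate (the real suite presumably parametrizes this with several schedule types):

import pytest


@pytest.fixture
def schedule_valid():
    # Hypothetical fixture: a simple generator schedule together with the three
    # values the optimizer should report before/after step_schedules().
    def _schedule():
        for rate in (0.001, 0.0005, 0.00025):
            yield rate

    return _schedule(), 0.001, 0.0005, 0.00025
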
Example No. 5
def test_layers_from_config(name, kwargs, in_data, out_data):
    cfg = {"@layers": name, **kwargs}
    filled = registry.fill_config({"config": cfg})
    model = registry.make_from_config(filled)["config"]
    if "LSTM" in name:
        # LSTM layers work on Padded sequences, so adapt the model to the test data.
        model = with_padded(model)
    if "FeatureExtractor" in name:
        # Data validation can't handle the fake docs used in this test, so
        # switch it off for this case.
        DATA_VALIDATION.set(False)
    model.initialize(in_data, out_data)
    Y, backprop = model(in_data, is_train=True)
    assert_data_match(Y, out_data)
    dX = backprop(Y)
    assert_data_match(dX, in_data)
    DATA_VALIDATION.set(True)
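
One caveat with the toggle above: if an assertion fails, validation stays disabled for later tests. Assuming DATA_VALIDATION is a contextvars.ContextVar (its .set() usage suggests so), a sketch of a more robust variant:

# Restore the previous validation setting even if the test body raises.
token = DATA_VALIDATION.set(False)
try:
    model.initialize(in_data, out_data)
    Y, backprop = model(in_data, is_train=True)
    assert_data_match(Y, out_data)
finally:
    DATA_VALIDATION.reset(token)
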
Example No. 6
def test_optimizers_from_config(name):
    learn_rate = 0.123
    cfg = {"@optimizers": name, "learn_rate": learn_rate}
    optimizer = registry.make_from_config({"config": cfg})["config"]
    assert optimizer.learn_rate == learn_rate
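
For comparison, the same optimizer can be built directly rather than through a config; a minimal sketch using thinc's Adam shortcut, assuming `from thinc.api import Adam` is available in this version:

from thinc.api import Adam

optimizer = Adam(learn_rate=0.123)
assert optimizer.learn_rate == 0.123
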