예제 #1
0
def test_use_scheduled_hyperparameter():
    """A schedule_hyperparameter handler's value can be reused by a second fn."""
    solo_net = tn.OutputHyperparameterNode(
        "a", hyperparameter="foo"
    ).network(default_hyperparameters=dict(foo=101))
    scaled_net = tn.SequentialNode(
        "s",
        [tn.OutputHyperparameterNode("a", hyperparameter="foo"),
         tn.MultiplyConstantNode("m", value=42)],
    ).network(default_hyperparameters=dict(foo=101))

    schedule = canopy.schedules.PiecewiseLinearSchedule([(1, 1), (10, 10)])
    scheduler = canopy.handlers.schedule_hyperparameter(schedule, "foo")

    scaled_fn = canopy.handled_fn(
        scaled_net,
        [canopy.handlers.use_scheduled_hyperparameter(scheduler)],
        {},
        {"out": "s"})

    def add_scaled_output(in_dict, result_dict):
        # Mirrors the scheduled value (times 42) into the outer results.
        result_dict["out2"] = scaled_fn(in_dict)["out"]

    plain_fn = canopy.handled_fn(
        solo_net,
        [scheduler, canopy.handlers.call_after_every(1, add_scaled_output)],
        {},
        {"out": "a"})

    # The schedule advances by one per call: 1, 2, ...
    for step in (1, 2):
        res = plain_fn({})
        nt.assert_equal(res, {"out": step, "out2": 42 * step})
예제 #2
0
def test_use_scheduled_hyperparameter():
    """Verify use_scheduled_hyperparameter shares a handler's scheduled value."""
    bare_net = tn.OutputHyperparameterNode(
        "a", hyperparameter="foo"
    ).network(default_hyperparameters=dict(foo=101))
    multiplied_net = tn.SequentialNode(
        "s",
        [
            tn.OutputHyperparameterNode("a", hyperparameter="foo"),
            tn.MultiplyConstantNode("m", value=42),
        ],
    ).network(default_hyperparameters=dict(foo=101))

    linear = canopy.schedules.PiecewiseLinearSchedule([(1, 1), (10, 10)])
    shared_handler = canopy.handlers.schedule_hyperparameter(linear, "foo")

    inner_fn = canopy.handled_fn(
        multiplied_net,
        [canopy.handlers.use_scheduled_hyperparameter(shared_handler)],
        {},
        {"out": "s"})

    def mirror(in_dict, result_dict):
        result_dict["out2"] = inner_fn(in_dict)["out"]

    outer_fn = canopy.handled_fn(
        bare_net,
        [shared_handler, canopy.handlers.call_after_every(1, mirror)],
        {},
        {"out": "a"})

    nt.assert_equal(outer_fn({}), {"out": 1, "out2": 42})
    nt.assert_equal(outer_fn({}), {"out": 2, "out2": 84})
예제 #3
0
def sensitivity_analysis_fn(input_name,
                            logit_name,
                            network,
                            handlers,
                            inputs=None,
                            **kwargs):
    """
    returns a function from input to sensitivity analysis heatmap

    NOTE(review): ``inputs`` and ``**kwargs`` are accepted but unused here;
    kept for signature compatibility with callers.
    """
    # Analysis-specific handlers run before any caller-provided ones.
    analysis_handlers = [
        SensitivityAnalysisOutput(idx_input_key="idx",
                                  output_key="outputs",
                                  input_name=input_name,
                                  logit_name=logit_name),
        canopy.handlers.override_hyperparameters(deterministic=True),
    ]
    analysis_handlers.extend(handlers)

    fn = canopy.handled_fn(network,
                           handlers=analysis_handlers,
                           inputs={"input": input_name},
                           outputs={})

    def inner(in_val, idx_val):
        # Map (input value, class index) -> heatmap under key "outputs".
        return fn({"input": in_val, "idx": idx_val})["outputs"]

    return inner
예제 #4
0
def test_fn(network):
    """Build a handled_fn that pads and chunks "x" before evaluation."""
    pipeline = [
        canopy.handlers.override_hyperparameters(deterministic=True),
        canopy.handlers.batch_pad(batch_size=BATCH_SIZE, keys=["x"]),
        canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                        variables=["x"]),
    ]
    return canopy.handled_fn(network,
                             pipeline,
                             {"x": "x"},
                             {"transformed": "st"})
예제 #5
0
def train_network(network, in_train, in_valid, max_iters):
    """Train ``network`` on ``in_train``, validating and printing every iter."""
    valid_handlers = [
        canopy.handlers.time_call(key="valid_time"),
        canopy.handlers.override_hyperparameters(deterministic=True),
        canopy.handlers.chunk_variables(batch_size=BATCH_SIZE, variables=["x", "y"]),
    ]
    valid_fn = canopy.handled_fn(
        network,
        valid_handlers,
        {"x": "x", "y": "y"},
        {"valid_cost": "cost", "pred": "pred"},
    )

    def validate(in_dict, result_dict):
        # Deterministic pass over the validation set; fold in accuracy.
        valid_out = valid_fn(in_valid)
        class_probs = valid_out.pop("pred")
        hard_preds = np.argmax(class_probs, axis=1)
        result_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(in_valid["y"], hard_preds)
        result_dict.update(valid_out)

    train_handlers = [
        canopy.handlers.time_call(key="total_time"),
        canopy.handlers.call_after_every(1, validate),
        canopy.handlers.time_call(key="train_time"),
        canopy.handlers.chunk_variables(batch_size=BATCH_SIZE, variables=["x", "y"]),
    ]
    train_fn = canopy.handled_fn(
        network,
        train_handlers,
        {"x": "x", "y": "y"},
        {"train_cost": "cost"},
        include_updates=True,
    )

    def callback(results_dict):
        # All referenced keys are produced per iteration by train_fn/validate.
        print("{_iter:3d}: "
              "train_cost: {train_cost:0.3f} "
              "valid_cost: {valid_cost:0.3f} "
              "valid_accuracy: {valid_accuracy:0.3f}".format(**results_dict))

    print("Starting training...")
    canopy.evaluate_until(fn=train_fn,
                          gen=itertools.repeat(in_train),
                          max_iters=max_iters,
                          callback=callback)
예제 #6
0
def test_fn(network):
    """Wrap ``network`` in a deterministic, padded, chunked evaluation fn."""
    handlers = [
        canopy.handlers.override_hyperparameters(deterministic=True),
        canopy.handlers.batch_pad(batch_size=BATCH_SIZE, keys=["x"]),
        canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                        variables=["x"]),
    ]
    inputs = {"x": "x"}
    outputs = {"transformed": "st"}
    return canopy.handled_fn(network, handlers, inputs, outputs)
예제 #7
0
def train_network(network, in_train, in_valid, max_iters):
    """Train ``network``, running a validation pass after every iteration."""
    valid_fn = canopy.handled_fn(
        network,
        [
            canopy.handlers.time_call(key="valid_time"),
            canopy.handlers.override_hyperparameters(deterministic=True),
            canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                            variables=["x", "y"]),
        ],
        {"x": "x", "y": "y"},
        {"valid_cost": "cost", "pred": "pred"},
    )

    def run_validation(in_dict, result_dict):
        # Score the held-out set and merge its metrics into this iteration.
        outputs = valid_fn(in_valid)
        predicted = np.argmax(outputs.pop("pred"), axis=1)
        result_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
            in_valid["y"], predicted)
        result_dict.update(outputs)

    train_fn = canopy.handled_fn(
        network,
        [
            canopy.handlers.time_call(key="total_time"),
            canopy.handlers.call_after_every(1, run_validation),
            canopy.handlers.time_call(key="train_time"),
            canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                            variables=["x", "y"]),
        ],
        {"x": "x", "y": "y"},
        {"train_cost": "cost"},
        include_updates=True,
    )

    def report(results_dict):
        print("{_iter:3d}: "
              "train_cost: {train_cost:0.3f} "
              "valid_cost: {valid_cost:0.3f} "
              "valid_accuracy: {valid_accuracy:0.3f}".format(**results_dict))

    print("Starting training...")
    canopy.evaluate_until(fn=train_fn,
                          gen=itertools.repeat(in_train),
                          max_iters=max_iters,
                          callback=report)
예제 #8
0
def test_schedule_hyperparameter_very_leaky_relu():
    """A scheduled leak_alpha should drive VeryLeakyReLU's negative slope."""
    network = tn.SequentialNode(
        "s",
        [tn.InputNode("i", shape=()), tn.VeryLeakyReLUNode("r")],
    ).network()

    def constant_alpha(in_dict, out_dict):
        # Always schedule an alpha of 10.
        return 10

    fn = canopy.handled_fn(
        network,
        [canopy.handlers.schedule_hyperparameter(constant_alpha, "leak_alpha")],
        {"x": "i"},
        {"out": "s"})
    nt.assert_equal(fn({"x": -2})["out"], -20)
예제 #9
0
def test_schedule_hyperparameter_very_leaky_relu():
    """With leak_alpha scheduled to 10, a -2 input yields -20."""
    relu_net = tn.SequentialNode("s", [
        tn.InputNode("i", shape=()),
        tn.VeryLeakyReLUNode("r"),
    ]).network()

    def schedule(in_dict, out_dict):
        return 10

    handler = canopy.handlers.schedule_hyperparameter(schedule, "leak_alpha")
    fn = canopy.handled_fn(relu_net, [handler], {"x": "i"}, {"out": "s"})
    res = fn({"x": -2})["out"]
    nt.assert_equal(res, -20)
예제 #10
0
def test_schedule_hyperparameter():
    """Scheduled values should override the default hyperparameter (101)."""
    network = tn.OutputHyperparameterNode(
        "a", hyperparameter="foo"
    ).network(default_hyperparameters=dict(foo=101))

    def schedule(in_dict, out_dict):
        # First call sees no previous output; afterwards shrink the value.
        if out_dict is None:
            return 100
        return treeano.utils.as_fX(np.random.rand() * out_dict["out"])

    handler = canopy.handlers.schedule_hyperparameter(schedule, "foo")
    fn = canopy.handled_fn(network, [handler], {}, {"out": "a"})

    prev = fn({})["out"]
    assert prev != 101
    nt.assert_equal(prev, 100)
    for _ in range(10):
        curr = fn({})["out"]
        assert curr < prev
        prev = curr
예제 #11
0
def test_schedule_hyperparameter():
    """The schedule's return value replaces the default foo of 101."""
    net = tn.OutputHyperparameterNode("a", hyperparameter="foo").network(
        default_hyperparameters=dict(foo=101))

    def shrinking_schedule(in_dict, out_dict):
        if out_dict is None:
            return 100
        else:
            # Multiply the previous output by a random factor in [0, 1).
            return treeano.utils.as_fX(np.random.rand() * out_dict["out"])

    fn = canopy.handled_fn(
        net,
        [canopy.handlers.schedule_hyperparameter(shrinking_schedule, "foo")],
        {},
        {"out": "a"})

    previous = fn({})["out"]
    assert previous != 101
    nt.assert_equal(previous, 100)
    for _ in range(10):
        current = fn({})["out"]
        assert current < previous
        previous = current
예제 #12
0
def customizable_sensitivity_analysis_fn(input_name,
                                         logit_name,
                                         network,
                                         handlers,
                                         inputs,
                                         outputs=None,
                                         *args,
                                         **kwargs):
    """
    returns a function from input to sensitivity analysis heatmap

    takes in additional keys for "input" and "idx"

    Parameters
    ----------
    input_name : name of the network's input node
    logit_name : name of the network's logit node
    network : network to analyze
    handlers : extra handlers appended after the analysis handlers
    inputs : mapping of fn input keys to node names; must not already
        contain "input" or "idx" (this function adds "input" itself)
    outputs : optional mapping of fn output keys to node names; must not
        already contain "outputs"

    NOTE(review): ``*args`` and ``**kwargs`` are accepted but unused.
    """
    if outputs is None:
        outputs = {}

    assert "outputs" not in outputs

    handlers = [
        SensitivityAnalysisOutput(idx_input_key="idx",
                                  output_key="outputs",
                                  input_name=input_name,
                                  logit_name=logit_name),
        canopy.handlers.override_hyperparameters(deterministic=True)
    ] + handlers

    assert "input" not in inputs
    assert "idx" not in inputs

    # Copy before adding the "input" key so the caller's dict is not mutated
    # (the original wrote into the argument in place).
    inputs = dict(inputs)
    inputs["input"] = input_name
    fn = canopy.handled_fn(network,
                           handlers=handlers,
                           inputs=inputs,
                           outputs=outputs)

    return fn
예제 #13
0
    },
                learning_rate=2e-4),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)

network = model.network()
network.build()  # build eagerly to share weights
print(network.root_node)

# Validation fn: deterministic evaluation that also records monitoring
# variables (keys "valid_%s"), padding then chunking "x"/"y" to BATCH_SIZE.
valid_fn = canopy.handled_fn(network, [
    canopy.handlers.time_call(key="valid_time"),
    canopy.handlers.evaluate_monitoring_variables(fmt="valid_%s"),
    canopy.handlers.override_hyperparameters(deterministic=True),
    canopy.handlers.batch_pad(BATCH_SIZE, keys=["x", "y"]),
    canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                    variables=["x", "y"])
], {
    "x": "x",
    "y": "y"
}, {
    "valid_cost": "cost",
    "pred": "pred"
})


def validate(in_dict, results_dict):
    """Run the validation fn and merge its metrics into ``results_dict``."""
    metrics = valid_fn(valid)
    labels = valid["y"]
    # Predictions beyond len(labels) correspond to batch padding; drop them.
    class_probs = metrics.pop("pred")[:len(labels)]
    hard_preds = np.argmax(class_probs, axis=1)
    metrics["valid_accuracy"] = (labels == hard_preds).mean()
    results_dict.update(metrics)
예제 #14
0
    tn.AdamNode(
        "updates",
        {"subtree": model,
         "cost": tn.TotalCostNode("cost", {
             "pred": tn.ReferenceNode("pred_ref", reference="model"),
             "target": tn.InputNode("y", shape=(None,), dtype="int32")},
         )}),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build()  # build eagerly to share weights

# Validation fn: timed, deterministic pass over "x"/"y" in BATCH_SIZE chunks.
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(deterministic=True),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"cost": "cost", "pred": "pred"})


def validate(in_dict, results_dict):
    """Store validation cost, timing, and accuracy in ``results_dict``."""
    out = valid_fn(in_valid)
    results_dict["valid_cost"] = out["cost"]
    results_dict["valid_time"] = out["valid_time"]
    hard_classes = np.argmax(out["pred"], axis=1)
    results_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, hard_classes)

train_fn = canopy.handled_fn(
예제 #15
0
파일: cifar10.py 프로젝트: diogo149/treeano
             "pred": tn.ReferenceNode("pred_ref", reference="model"),
             "target": tn.InputNode("y", shape=(None,), dtype="int32")},
         )},
        learning_rate=2e-4),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)

network = model.network()
network.build()  # build eagerly to share weights
print(network.root_node)

# Validation fn: timed deterministic pass that also evaluates monitoring
# variables (prefixed "valid_"), padding/chunking inputs to BATCH_SIZE.
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.evaluate_monitoring_variables(fmt="valid_%s"),
     canopy.handlers.override_hyperparameters(deterministic=True),
     canopy.handlers.batch_pad(BATCH_SIZE, keys=["x", "y"]),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"valid_cost": "cost", "pred": "pred"})


def validate(in_dict, results_dict):
    """Compute accuracy on the un-padded predictions and merge outputs."""
    out = valid_fn(valid)
    y_true = valid["y"]
    trimmed = out.pop("pred")[:len(y_true)]  # drop padded predictions
    out["valid_accuracy"] = (y_true == np.argmax(trimmed, axis=1)).mean()
    results_dict.update(out)

train_fn = canopy.handled_fn(
예제 #16
0
        "adam",
        {"subtree": model,
         "cost": anrat.ANRATNode("cost", {
             "pred": tn.ReferenceNode("pred_ref", reference="model"),
             "target": tn.InputNode("y", shape=(None,), dtype="int32")},
             i32_target=True,
         )}),
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500
# Training fn: chunked evaluation that also applies network updates.
train_fn = canopy.handled_fn(
    network,
    [canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"cost": "cost"},
    include_updates=True)

# Validation fn: same chunking, but with dropout_probability forced to 0.
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.override_hyperparameters(dropout_probability=0),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"cost": "cost", "pred": "pred"})


# ################################# training #################################
예제 #17
0
    learning_rate=2e-3,
)
# Expose "late_gate" via a SharedHyperparameterNode wrapping the model.
model = tn.SharedHyperparameterNode(
    "late_gate",
    model,
    hyperparameter="late_gate"
)

print(model)
network = model.network()
network.build()  # build eagerly to share weights

# Validation fn: timed deterministic pass; also reads node "total_cost".
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(deterministic=True),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"valid_cost": "cost", "pred": "pred", "valid_total_cost": "total_cost"})


def validate(in_dict, result_dict):
    """Add validation accuracy and remaining outputs to ``result_dict``."""
    scores = valid_fn(valid)
    chosen = np.argmax(scores.pop("pred"), axis=1)
    result_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        valid["y"], chosen)
    result_dict.update(scores)

train_fn = canopy.handled_fn(
    network,
예제 #18
0
                    "pred": tn.ReferenceNode("pred_ref", reference="model"),
                    "target": tn.InputNode("y", shape=(None, ), dtype="int32")
                },
            )
        }),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build()  # build eagerly to share weights

# Validation fn: overrides bn_use_moving_stats=True for evaluation.
valid_fn = canopy.handled_fn(network, [
    canopy.handlers.time_call(key="valid_time"),
    canopy.handlers.override_hyperparameters(bn_use_moving_stats=True),
    canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                    variables=["x", "y"])
], {
    "x": "x",
    "y": "y"
}, {
    "cost": "cost",
    "pred": "pred"
})


def validate(in_dict, results_dict):
    """Copy validation cost/time and computed accuracy into results_dict."""
    out = valid_fn(in_valid)
    results_dict["valid_cost"] = out["cost"]
    results_dict["valid_time"] = out["valid_time"]
    labels = np.argmax(out["pred"], axis=1)
    results_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, labels)
예제 #19
0
        {"subtree": model,
         "cost": tn.TotalCostNode("cost", {
             "pred": tn.ReferenceNode("pred_ref", reference="model"),
             "target": tn.InputNode("y", shape=(None,), dtype="int32")},
             cost_function=treeano.utils.categorical_crossentropy_i32,
         )}),
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500

# Validation fn: timed pass with bn_use_moving_stats=True, chunked inputs.
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(bn_use_moving_stats=True),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"cost": "cost", "pred": "pred"})


def validate(in_map):
    """Write validation metrics (cost, time, accuracy) into ``in_map``."""
    out = valid_fn(in_valid)
    in_map["valid_cost"] = out["cost"]
    in_map["valid_time"] = out["valid_time"]
    winners = np.argmax(out["pred"], axis=1)
    in_map["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, winners)

train_fn = canopy.handled_fn(
예제 #20
0
                },
            )
        }),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500

# Validation fn: timed pass with dropout disabled (dropout_probability=0).
valid_fn = canopy.handled_fn(network, [
    canopy.handlers.time_call(key="valid_time"),
    canopy.handlers.override_hyperparameters(dropout_probability=0),
    canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                    variables=["x", "y"])
], {
    "x": "x",
    "y": "y"
}, {
    "cost": "cost",
    "pred": "pred"
})


def validate(in_dict, results_dict):
    """Evaluate the validation set and record cost/time/accuracy."""
    out = valid_fn(in_valid)
    predicted = np.argmax(out["pred"], axis=1)
    results_dict["valid_cost"] = out["cost"]
    results_dict["valid_time"] = out["valid_time"]
    results_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, predicted)
예제 #21
0
                {
                    "pred": tn.ReferenceNode("pred_ref", reference="model"),
                    "target": tn.InputNode("y", shape=(None, ), dtype="int32")
                },
            )
        }),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500
# Training fn: chunked evaluation that also applies network updates.
train_fn = canopy.handled_fn(network, [
    canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                    variables=["x", "y"])
], {
    "x": "x",
    "y": "y"
}, {"cost": "cost"},
                             include_updates=True)

# Validation fn: same chunking, but with dropout disabled.
valid_fn = canopy.handled_fn(network, [
    canopy.handlers.override_hyperparameters(dropout_probability=0),
    canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                    variables=["x", "y"])
], {
    "x": "x",
    "y": "y"
}, {
    "cost": "cost",
    "pred": "pred"
})
예제 #22
0
파일: model.py 프로젝트: rewonc/queequeg
            # build eagerly to share weights
            network.build()
            # to make sure network is serializable
            ttu.save_network(trial, "initial", network)

            # Validation
            valid_fn = canopy.handled_fn(
                network,
                [
                    canopy.handlers.time_call(key="valid_time"),
                    canopy.handlers.override_hyperparameters(
                        deterministic=True,
                        bn_use_moving_stats=False,
                        ),
                    canopy.handlers.evaluate_monitoring_variables(fmt="valid_%s"),
                    canopy.handlers.chunk_variables(batch_size=params.batch_size,
                                                    variables=["x", "y"])
                ],
                inputs={"x": "x", "y": "y"},
                outputs={
                    "valid_cost": "cost",
                    "valid_probs": "pred",
                    "window_min": ('windower', 'mins'),
                    "window_max": ('windower', 'maxs')
                    }
            )

            best_valid_iou = 0.0   # 0 is natural min.
            valid_iter = 0

            def add_validate_metrics(in_dict, result_dict):
                global best_valid_iou
예제 #23
0
# Adam update node wrapped around the model; its cost is a reference to
# the node named "total_cost".
with_updates = tn.HyperparameterNode(
    "with_updates",
    tn.AdamNode(
        "adam",
        {"subtree": model,
         "cost": tn.ReferenceNode("cost_ref", reference="total_cost")}),
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500

# Validation fn: timed pass with dropout disabled; outputs total cost + preds.
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(dropout_probability=0),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"total_cost": "total_cost", "pred": "pred"})


def validate(in_dict, results_dict):
    """Record total cost, timing, and accuracy from a validation pass."""
    out = valid_fn(in_valid)
    top_classes = np.argmax(out["pred"], axis=1)
    results_dict["valid_cost"] = out["total_cost"]
    results_dict["valid_time"] = out["valid_time"]
    results_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, top_classes)

train_fn = canopy.handled_fn(
예제 #24
0
파일: mnist.py 프로젝트: shaoxuan92/treeano
    "with_updates",
    tn.AdamNode(
        "adam",
        {"subtree": model,
         "cost": tn.ReferenceNode("cost_ref", reference="total_cost")}),
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500

# Validation fn: timed, dropout-free pass; "pred" is read from node "y_pred".
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(dropout_probability=0),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"total_cost": "total_cost",
     "pred": "y_pred"})


def validate(in_dict, results_dict):
    """Populate results with validation total cost, time, and accuracy."""
    out = valid_fn(in_valid)
    results_dict["valid_total_cost"] = out["total_cost"]
    results_dict["valid_time"] = out["valid_time"]
    picks = np.argmax(out["pred"], axis=1)
    results_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, picks)
예제 #25
0
                    "target": tn.InputNode("y", shape=(None, ), dtype="int32")
                },
            )
        }),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build()  # build eagerly to share weights

# Validation fn: dropout disabled; inputs padded then chunked to BATCH_SIZE.
valid_fn = canopy.handled_fn(network, [
    canopy.handlers.time_call(key="valid_time"),
    canopy.handlers.override_hyperparameters(dropout_probability=0),
    canopy.handlers.batch_pad(BATCH_SIZE, keys=["x", "y"]),
    canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                    variables=["x", "y"])
], {
    "x": "x",
    "y": "y"
}, {
    "valid_cost": "cost",
    "pred": "pred"
})


def validate(in_dict, results_dict):
    """Score the un-padded slice of predictions and merge metrics."""
    out = valid_fn(valid)
    gold = valid["y"]
    kept = out.pop("pred")[:len(gold)]  # ignore rows added by batch_pad
    out["valid_accuracy"] = (gold == np.argmax(kept, axis=1)).mean()
    results_dict.update(out)
예제 #26
0
        {"subtree": model,
         "cost": tn.TotalCostNode("cost", {
             "pred": tn.ReferenceNode("pred_ref", reference="model"),
             "target": tn.InputNode("y", shape=(None,), dtype="int32")},
         )}),
    cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build()  # build eagerly to share weights

BATCH_SIZE = 500

# Validation fn: timed pass with dropout disabled, chunked into batches.
valid_fn = canopy.handled_fn(
    network,
    [canopy.handlers.time_call(key="valid_time"),
     canopy.handlers.override_hyperparameters(dropout_probability=0),
     canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
                                     variables=["x", "y"])],
    {"x": "x", "y": "y"},
    {"cost": "cost", "pred": "pred"})


def validate(in_dict, results_dict):
    """Transfer validation cost/time and computed accuracy to results."""
    fetched = valid_fn(in_valid)
    results_dict["valid_cost"] = fetched["cost"]
    results_dict["valid_time"] = fetched["valid_time"]
    label_preds = np.argmax(fetched["pred"], axis=1)
    results_dict["valid_accuracy"] = sklearn.metrics.accuracy_score(
        y_valid, label_preds)

train_fn = canopy.handled_fn(