Example 1
def test_020_matmul_build_specification():
    """Verify that Matmul.specification() builds the expected specification dict."""
    name = "matmul01"
    num_nodes = 8
    num_features = 2
    weights_initialization_scheme = "he"
    expected_spec = {
        _SCHEME: Matmul.class_id(),
        _PARAMETERS: {
            _NAME: name,
            _NUM_NODES: num_nodes,
            _NUM_FEATURES: num_features,  # NOT including bias
            _WEIGHTS: {
                _SCHEME: weights_initialization_scheme
            },
            _OPTIMIZER: SGD.specification(name="sgd")
        }
    }
    actual_spec = Matmul.specification(
        name=name,
        num_nodes=num_nodes,
        num_features=num_features,
        weights_initialization_scheme=weights_initialization_scheme,
    )
    assert expected_spec == actual_spec, \
        "expected\n%s\nactual\n%s\n" % (expected_spec, actual_spec)
Example 2
def output(m, d):
    """Build the output block: a matmul layer followed by a cross-entropy log loss layer.
    Args:
        m: number of outputs (== number of nodes)
        d: number of features in the input
    """
    return {
        "matmul":
        Matmul.specification(
            name="matmul",
            num_nodes=m,
            num_features=d,
            weights_initialization_scheme="he",
            weights_optimizer_specification=optimizer.SGD.specification(
                lr=0.05, l2=1e-3)),
        "loss":
        CrossEntropyLogLoss.specification(name="loss", num_nodes=m)
    }
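A hypothetical call of output(): the surrounding _COMPOSITE_LAYER_SPEC layout is borrowed from Example 3 below, and the classifier name is made up.

layer_spec = output(m=1, d=2)            # {"matmul": {...}, "loss": {...}}
classifier_spec = {
    _NAME: "classifier",                 # made-up name
    _NUM_NODES: 1,
    _COMPOSITE_LAYER_SPEC: layer_spec,   # layout as in Example 3
}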
Example 3
def test():
    M = 1
    D = 2
    N = 100

    X, T, V = linear_separable(d=D, n=N)
    x_min, x_max = X[:, 0].min(), X[:, 0].max()
    y_min, y_max = X[:, 1].min(), X[:, 1].max()

    sigmoid_classifier_specification = {
        _NAME: "softmax_classifier",
        _NUM_NODES: M,
        _LOG_LEVEL: logging.ERROR,
        _COMPOSITE_LAYER_SPEC: {
            "matmul01":
            Matmul.specification(
                name="matmul",
                num_nodes=M,
                num_features=D,
                weights_initialization_scheme="he",
                weights_optimizer_specification=SGD.specification(
                    lr=TYPE_FLOAT(0.2), l2=TYPE_FLOAT(1e-3))),
            "loss":
            CrossEntropyLogLoss.specification(
                name="loss",
                num_nodes=M,
                loss_function=sigmoid_cross_entropy_log_loss.__qualname__)
        }
    }
    logistic_classifier = SequentialNetwork.build(
        specification=sigmoid_classifier_specification
    )

    # Full-batch training for 50 iterations.
    for _ in range(50):
        logistic_classifier.train(X=X, T=T)

    # Predict the class of a single point; the label should be 0 or 1.
    prediction = logistic_classifier.predict(
        np.array([-1., -1.], dtype=TYPE_FLOAT))
    assert np.all(np.isin(prediction, [0, 1]))
    print(prediction)
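    # Hypothetical follow-up (not in the original source): probe the decision
    # boundary over the data range computed at the top of this test. Assumes
    # predict() accepts a batch of shape (n, 2), as train() does for X.
    xs = np.linspace(x_min, x_max, num=20, dtype=TYPE_FLOAT)
    ys = np.linspace(y_min, y_max, num=20, dtype=TYPE_FLOAT)
    grid = np.array([[x, y] for x in xs for y in ys], dtype=TYPE_FLOAT)
    grid_predictions = logistic_classifier.predict(grid)
    print(grid_predictions)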
Example 4
def inference(index: int, m: int, d: int,
              activation=ReLU.class_id()) -> Dict[str, dict]:
    """Build matmul-bn-activation specifications
    Args:
        index: stack position in the network
        m: number of outputs (== number of nodes)
        d: number of features in the input
        activation: activation class id (ReLU or Sigmoid). In the original
            source this was a free variable captured from an enclosing scope;
            it is exposed here as a parameter so the function is self-contained.
    """
    return {
        f"matmul{index:03d}":
        Matmul.specification(
            name=f"matmul{index:03d}",
            num_nodes=m,
            num_features=d,
            weights_initialization_scheme="he",
            weights_optimizer_specification=optimizer.SGD.specification(
                lr=0.05, l2=1e-3)),
        f"bn{index:03d}":
        BatchNormalization.specification(
            name=f"bn{index:03d}",
            num_nodes=m,
            gamma_optimizer_specification=optimizer.SGD.specification(
                lr=0.05, l2=1e-3),
            beta_optimizer_specification=optimizer.SGD.specification(
                lr=0.05, l2=1e-3),
            momentum=0.9),
        f"activation{index:03d}":
        ReLU.specification(
            name=f"relu{index:03d}",
            num_nodes=m,
        ) if activation == ReLU.class_id() else Sigmoid.specification(
            name=f"sigmoid{index:03d}",
            num_nodes=m,
        )
    }
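A hypothetical composition of the helpers above: two inference() blocks stacked in front of the output() block from Example 2. The layer widths and the dict-merge composition into one composite-layer spec are assumptions modelled on Example 3.

composite_layer_spec = {}                                  # hypothetical stack: 2 -> 8 -> 4 -> 1
composite_layer_spec.update(inference(index=0, m=8, d=2))  # 2 features -> 8 nodes
composite_layer_spec.update(inference(index=1, m=4, d=8))  # 8 nodes -> 4 nodes
composite_layer_spec.update(output(m=1, d=4))              # output() from Example 2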