def forward(self, inputs):
    self.inputs = inputs
    res = []
    # get the output value of each unit
    for unit in self.nodes:
        val = self.activation.f(dot_product(unit.weights, inputs))
        unit.val = val
        res.append(val)
    return res
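
The forward pass above assumes a surrounding layer class whose nodes carry per-unit weight vectors and whose activation object exposes an f method, plus a dot_product helper. The sketch below wires up minimal stand-ins for those pieces so the method can be run on its own; Node, Sigmoid, Layer and dot_product here are illustrative assumptions, not the library's actual classes.

import math

def dot_product(xs, ys):
    # sum of element-wise products
    return sum(x * y for x, y in zip(xs, ys))

class Sigmoid:
    def f(self, x):
        return 1 / (1 + math.exp(-x))

class Node:
    def __init__(self, weights):
        self.weights = weights
        self.val = None

class Layer:
    def __init__(self, nodes, activation):
        self.nodes = nodes
        self.activation = activation

    def forward(self, inputs):
        # same body as above: cache inputs, activate every unit, collect outputs
        self.inputs = inputs
        res = []
        for unit in self.nodes:
            val = self.activation.f(dot_product(unit.weights, inputs))
            unit.val = val
            res.append(val)
        return res

layer = Layer([Node([0.5, -0.5]), Node([0.1, 0.9])], Sigmoid())
print(layer.forward([1.0, 2.0]))  # activations of the two units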
Example #2
def LogisticLinearLeaner(dataset, learning_rate=0.01, epochs=100):
    """
    [Section 18.6.5]
    Linear classifier with logistic regression.
    """
    idx_i = dataset.inputs
    idx_t = dataset.target
    examples = dataset.examples
    num_examples = len(examples)

    # X transpose
    X_col = [dataset.values[i] for i in idx_i]  # vertical columns of X

    # add dummy
    ones = [1 for _ in range(len(examples))]
    X_col = [ones] + X_col

    # initialize random weights
    num_weights = len(idx_i) + 1
    w = random_weights(min_value=-0.5, max_value=0.5, num_weights=num_weights)

    for epoch in range(epochs):
        err = []
        h = []
        # pass over all examples
        for example in examples:
            x = [1] + example
            y = sigmoid(dot_product(w, x))
            h.append(sigmoid_derivative(y))
            t = example[idx_t]
            err.append(t - y)

        # update weights
        for i in range(len(w)):
            buffer = [x * y for x, y in zip(err, h)]
            w[i] = w[i] + learning_rate * (dot_product(buffer, X_col[i]) /
                                           num_examples)

    def predict(example):
        x = [1] + example
        return sigmoid(dot_product(w, x))

    return predict
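
LogisticLinearLeaner leans on a few helpers from the surrounding library: dot_product, sigmoid, sigmoid_derivative and random_weights. A minimal sketch of how they are commonly defined is given below so the learner can be read and run in isolation; the library's own versions may differ in detail.

import math
import random

def dot_product(xs, ys):
    # sum of element-wise products over the shorter of the two vectors
    return sum(x * y for x, y in zip(xs, ys))

def sigmoid(x):
    return 1 / (1 + math.exp(-x))

def sigmoid_derivative(value):
    # derivative of the sigmoid written in terms of its output value
    return value * (1 - value)

def random_weights(min_value, max_value, num_weights):
    return [random.uniform(min_value, max_value) for _ in range(num_weights)]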
Example #3
def LinearLearner(dataset, learning_rate=0.01, epochs=100):
    """
    [Section 18.6.4]
    Linear classifier with hard threshold.
    """
    idx_i = dataset.inputs
    idx_t = dataset.target
    examples = dataset.examples
    num_examples = len(examples)

    # X transpose
    X_col = [dataset.values[i] for i in idx_i]  # vertical columns of X

    # add dummy
    ones = [1 for _ in range(len(examples))]
    X_col = [ones] + X_col

    # initialize random weights
    num_weights = len(idx_i) + 1
    w = random_weights(min_value=-0.5, max_value=0.5, num_weights=num_weights)

    for epoch in range(epochs):
        err = []
        # pass over all examples
        for example in examples:
            x = [1] + example
            y = dot_product(w, x)
            t = example[idx_t]
            err.append(t - y)

        # update weights
        for i in range(len(w)):
            w[i] = w[i] + learning_rate * (dot_product(err, X_col[i]) /
                                           num_examples)

    def predict(example):
        x = [1] + example
        return dot_product(w, x)

    return predict
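
Both learners expect a dataset object exposing inputs, target, examples and values, and both return a predict closure over the learned weights. The toy stand-in below is an illustrative assumption (not the library's DataSet class), built so that dataset.values[i] really holds the i-th column of X, which is what the "X transpose" comment above relies on; it also assumes the helper functions sketched earlier.

class ToyDataSet:
    def __init__(self, examples, inputs, target):
        self.examples = examples  # each row: input attributes, then the target
        self.inputs = inputs      # indices of the input attributes
        self.target = target      # index of the target attribute
        # store every column so values[i] is the i-th column of X
        self.values = [[e[i] for e in examples] for i in range(len(examples[0]))]

examples = [
    [0.0, 0.1, 0],
    [0.2, 0.3, 0],
    [0.8, 0.9, 1],
    [1.0, 0.7, 1],
]
data = ToyDataSet(examples, inputs=[0, 1], target=2)

hard = LinearLearner(data, learning_rate=0.05, epochs=500)
soft = LogisticLinearLeaner(data, learning_rate=0.05, epochs=500)
print(hard([0.9, 0.8]))  # raw linear score for a new point
print(soft([0.9, 0.8]))  # estimated probability of class 1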
Example #4
def predict(example):
    # w is a learned weight vector captured from an enclosing learner
    # (as in LogisticLinearLeaner above)
    x = [1] + example
    return sigmoid(dot_product(w, x))
Example #5
def predict(example):
    # w is a learned weight vector captured from an enclosing learner
    # (as in LinearLearner above)
    x = [1] + example
    return dot_product(w, x)