        # Look at the last n validation costs; if they decreased at every step,
        # raise the learning rate by 5%.
        if len(self.validation_cost) > n:
            a = np.array(self.validation_cost)[-n:]
            signs = np.sign(a[1:] - a[:-1])        # -1 for each drop, +1 for each rise
            going_down = -signs.sum() == (n - 1)   # True >>> cost decreased continuously
            if going_down:
                current_learning_rate *= 1.05
        return current_learning_rate

    def _check_early_stopping(self):
        """Return the index of the best (lowest-cost) model if the validation
        cost has been rising continuously over the last n values, else None."""
        n = 5  # consider the last n validation-cost values
        validation_cost_sequence = self.validation_cost
        if len(validation_cost_sequence) <= n:
            return None
        a = np.array(validation_cost_sequence)[-n:]
        signs = np.sign(a[1:] - a[:-1])      # +1 for each rise, -1 for each drop
        going_up = signs.sum() == (n - 1)    # True >>> cost increased continuously
        if going_up:
            # Overfitting detected: return the index of the best model so far.
            ix = validation_cost_sequence.index(min(validation_cost_sequence))
            return ix
        return None


#======= end of BatchGradientDescentEarlyStopping =============================================
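# Both helpers above rely on the same trend check: take the last n validation
# costs, compute the sign of each consecutive difference, and compare the sum
# of the signs against n - 1. A small standalone sketch (plain NumPy,
# independent of the class, with made-up cost values) shows the three outcomes:

import numpy as np

def cost_trend(costs, n=5):
    """Classify the last n cost values as 'down', 'up', or 'mixed'."""
    a = np.asarray(costs[-n:], dtype=float)
    signs = np.sign(a[1:] - a[:-1])       # -1 for each drop, +1 for each rise
    if -signs.sum() == n - 1:             # every step decreased
        return "down"                     # -> learning rate would be raised by 5%
    if signs.sum() == n - 1:              # every step increased
        return "up"                       # -> early stopping would be triggered
    return "mixed"                        # -> neither rule fires

print(cost_trend([0.9, 0.8, 0.7, 0.6, 0.5]))   # down
print(cost_trend([0.5, 0.6, 0.7, 0.8, 0.9]))   # up
print(cost_trend([0.5, 0.6, 0.4, 0.8, 0.7]))   # mixed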

# Fit the model and evaluate it on the full data set.
md = BatchGradientDescentEarlyStopping(random_state=None, verbose=True)
md.fit(X, y)

probs = md.predict_probabilities(X)   # class-membership probabilities
y_pred = md.predict(X)                # hard class predictions
accuracy = md.accuracy(X, y)
print("accuracy on the whole data set =", accuracy)