Example #1
0
        # Project onto the learned discriminant axes, keeping at most
        # n_classes columns of the projection.
        res = X.dot(self.proj)[:, :self.n_classes]
        return res

    def transform(self, X):
        """Project X onto the learned discriminant axes.

        Keeps at most ``n_classes - 1`` components (the rank limit of LDA),
        optionally capped further by ``self.dims``.
        """
        EPS = 1e-10  # guards the scaling division against zero variance
        if self.scale:
            X = np.array(X, dtype=np.float32)
            X = (X - self.X_offset) / (self.X_scale + EPS)

        # LDA yields at most n_classes - 1 useful directions; use the builtin
        # min instead of np.min over a throwaway list.
        if self.dims is None:
            dims = self.n_classes - 1
        else:
            dims = min(self.dims, self.n_classes - 1)

        res = X.dot(self.proj)[:, :dims]
        return res

    def score(self, X, y=None):
        """Score argmax class predictions on X against the labels y."""
        labels = np.argmax(self.predict(X), axis=1)
        return ScoreMulticlass(y, labels)


if __name__ == "__main__":
    # Smoke-test both solver backends on the standard datasets, then run the
    # reconstruction/visualisation helpers.
    testBreast(LDA(2, solver="eig"))
    testIris(LDA(2, solver="eig"))
    testBreast(LDA(2, solver="svd"))
    testIris(LDA(2, solver="svd"))
    testRecoIris(LDA(2, solver="eig"), display2D=True)
    testRecoWine(LDA(2, solver="eig"), display=True)
Example #2
0
        if self.scale:
            X = np.array(X, dtype=np.float32)
            X = (X - self.X_offset) / (self.X_scale + EPS)
        n_samples, _ = X.shape
        if self.fit_intercept:
            # Prepend a constant-one column so the intercept is folded into W.
            ones = np.ones((n_samples, 1))
            X = np.concatenate((ones, X), axis=1)

        if self.multi_class == 'ovr':
            # One-vs-rest: one weight vector per class; stack the margins
            # column-wise via the transpose.
            preds = []
            for i in range(len(self.W)):
                preds.append(X.dot(self.W[i]))
            preds = np.array(preds).T
        elif self.multi_class == 'multi':
            # Single multiclass weight matrix.
            preds = X.dot(self.W)
        else:
            raise NotImplementedError

        return preds


    def score(self, X: np.array, y: np.array) -> ScoreMulticlass:
        """Score argmax class predictions for X against the labels y."""
        predicted_labels = np.argmax(self.predict(X), axis=1)
        return ScoreMulticlass(y, predicted_labels)


if __name__ == '__main__':
    # Smoke-test both the one-vs-rest and the multiclass formulations, with
    # and without an intercept (recorded F1 scores in the trailing comments).
    testIris(LinearSVC(lr=0.1, l2_ratio=1e-3, fit_intercept=True, multi_class='ovr')) # F1 0.92
    testIris(LinearSVC(lr=0.1, l2_ratio=1e-2, fit_intercept=False, multi_class='ovr')) # F1 0.84
    testIris(LinearSVC(lr=0.1, fit_intercept=True, multi_class='multi')) # F1 0.92
    testIris(LinearSVC(lr=0.1, fit_intercept=False, multi_class='multi')) # F1 0.72
            # Record the training loss each iteration for later inspection.
            self.history.append(cross_entropy(y, preds))
        self.W = W
        pass

    def softmax(self, alpha):
        """Row-wise softmax of the 2-D score matrix ``alpha``.

        Subtracts the per-row maximum before exponentiating so large scores
        cannot overflow ``np.exp``; the shift cancels out mathematically.
        """
        # keepdims=True lets broadcasting divide each row by its own sum,
        # replacing the original np.repeat and the duplicated np.exp call.
        shifted = alpha - np.max(alpha, axis=1, keepdims=True)
        exp_scores = np.exp(shifted)
        return exp_scores / np.sum(exp_scores, axis=1, keepdims=True)

    def predict(self, X):
        """Return softmax-normalized class scores for each row of X."""
        EPS = 1e-10  # keeps the feature-scaling division finite
        if self.scale:
            X = np.array(X, dtype=np.float32)
            X = (X - self.X_offset) / (self.X_scale + EPS)
        if self.fit_intercept:
            # Fold the intercept in by prepending a constant-one column.
            rows = X.shape[0]
            X = np.concatenate((np.ones((rows, 1)), X), axis=1)
        # Linear scores followed by a softmax over classes.
        return self.softmax(X.dot(self.W))

    def score(self, X, y):
        """Score argmax class predictions on X against the labels y."""
        hard_labels = np.argmax(self.predict(X), axis=1)
        return ScoreMulticlass(y, hard_labels)


if __name__ == "__main__":
    # Smoke-test softmax regression on the Iris dataset.
    testIris(LogisticRegression(n_iters=4000, lr=0.1))
Example #4
0
        n_samples, _ = X.shape
        Y = np.zeros((n_samples, 1))
        EPS = 1e-10  # avoids division by zero in scaling and distance weights
        if self.scale:
            X = np.array(X, dtype=np.float32)
            X = (X - self.X_offset) / (self.X_scale + EPS)

        for n in range(n_samples):
            # Euclidean distance from sample n to every stored training point.
            distance = np.sqrt(np.sum((X[n] - self.X_params)**2, axis=1))
            distance_ind = np.argsort(distance)
            distance_ind = distance_ind[:self.N]  # indices of the N nearest
            nearestY = self.Y_params[distance_ind]
            if self.weights == "distance":
                weight = (1 / distance[distance_ind]).T
                weight /= weight.mean()
                # weight points by the inverse of their distance
                nearestY = (weight * nearestY.T).T
            Y[n] = np.mean(nearestY, 0)

        # Undo the target scaling applied during fit.
        return (Y * self.Y_scale) + self.Y_offset

    def score(self, X, y):
        """Score the regression predictions for X against the targets y."""
        predictions = self.predict(X)
        return Score(y, predictions)


if __name__ == "__main__":
    # Smoke-test the classifier (uniform and distance weighting) and the
    # regressor variants.
    testIris(KNNClassifier(5))
    testDigits(KNNClassifier(5, "distance"))
    testHousing(KNNRegressor(5))
Example #5
0
        for i in range(self.n_classes):
            # Class prior: fraction of samples in class i
            # (y appears to be one-hot encoded — TODO confirm against caller).
            self.p_classes[i] = sum(y[:, i]) / n_samples
            Xi = X[y[:, i] == 1]
            for j in range(n_features):
                # Per-class, per-feature Gaussian parameters.
                self.mean_class[i, j] = Xi[:, j].mean()
                self.variance_class[i, j] = Xi[:, j].var()

    def predict(self, X):
        """Return per-class Gaussian likelihood scores (unnormalized posteriors)."""
        EPS = 1e-10  # keeps the density terms finite when a variance is zero
        n_samples, n_features = X.shape
        likelihood = np.zeros((n_samples, self.n_classes))
        for c in range(self.n_classes):
            # Start from the class prior, then multiply in one independent
            # Gaussian density factor per feature (naive assumption).
            prob = np.full(n_samples, self.p_classes[c])
            for f in range(n_features):
                var = self.variance_class[c, f]
                diff = X[:, f] - self.mean_class[c, f]
                prob = prob * (1 / (EPS + np.sqrt(2 * np.pi * var)))
                prob = prob * np.exp(-(diff**2) / (EPS + 2 * var))
            likelihood[:, c] = prob
        return likelihood

    def score(self, X, y):
        """Score argmax class predictions on X against the labels y."""
        chosen = np.argmax(self.predict(X), axis=1)
        return ScoreMulticlass(y, chosen)


if __name__ == "__main__":
    # Smoke-test Gaussian naive Bayes on three standard datasets.
    testIris(NaiveBayes())
    testBreast(NaiveBayes())
    testDigits(NaiveBayes())
Example #6
0
    def predict(self, X):
        """Run a forward pass through every layer and return the activations."""
        EPS = 1e-10  # keeps the input-scaling division finite
        if self.scale:
            X = np.array(X, dtype=np.float32)
            X = (X - self.X_offset) / (self.X_scale + EPS)
        activations = X
        for layer in self.layers:
            activations = layer.forward(activations)
        return activations

    def score(self, X, y):
        """Score argmax class predictions on X against the labels y."""
        label_preds = np.argmax(self.predict(X), axis=1)
        return ScoreMulticlass(y, label_preds)


if __name__ == "__main__":
    # Smoke-test a small regularized network on Iris and a larger one on
    # the digits dataset.
    testIris(
        NeuralNetwork(
            layerSizes=[20, 10],
            n_iters=100,
            lr=1,
            activation=LeakyRelu,
            l1_ratio=1e-3,
            l2_ratio=1e-3,
        ))
    testDigits(
        NeuralNetwork(layerSizes=[60, 20],
                      n_iters=60,
                      lr=1,
                      activation=LeakyRelu))