Example #1
    def fit(self, train_x, train_y, max_epochs=-1, target_acc=None, verbose=0):
        """
        :param train_x: train set features
        :param train_y: train set labels
        :param max_epochs: maximum number of epochs (-1 for no limit)
        :param target_acc: if max_epochs is not given, stop once training accuracy reaches this value
        :param verbose: 0 - print logs (e.g. losses and accuracy)
                        1 - don't print logs
        """
        epoch = 1

        # Map the labels to the representation used by the perceptron update (e.g. {-1, +1})
        mapped_train_y = self.mapper(train_y)

        scores_train = []
        weights = []

        while True:
            training_order = np.random.permutation(range(train_x.shape[0]))
            changed = False
            for i_sample in training_order:
                x_, y_true = train_x[i_sample].copy().reshape(-1, 1), mapped_train_y[i_sample]
                y_pred = self.predict(x_)

                if y_pred != y_true:
                    changed = True
                    self.w += self.lr * y_true * Perceptron.concat_ones(x_)

            if not changed:
                break

            # score_train = self.print_training_log(verbose, train_x, mapped_train_y, epoch)
            # scores_train.append(score_train)

            epoch += 1
            weights.append(self.w.copy())
            # Stop once max_epochs full passes over the training set have been made
            if max_epochs != -1 and epoch > max_epochs:
                break
            # elif max_epochs == -1 and score_train >= target_acc:
            #     break

        return scores_train, weights
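
The fit snippet above relies on several members that are not shown: self.lr, self.w, self.mapper, self.predict and the Perceptron.concat_ones helper. Below is a minimal sketch of that scaffolding, written only to make Example #1 readable on its own; the constructor signature (n_features, lr), the {-1, +1} label mapping and the internals of predict are assumptions for illustration, not the original repository's code.

import numpy as np

class Perceptron:
    def __init__(self, n_features, lr=1.0):
        self.lr = lr                            # learning rate
        self.w = np.zeros((n_features + 1, 1))  # weight vector, last entry is the bias
        self.classes = None                     # original label values, set by mapper()

    @staticmethod
    def concat_ones(x_):
        # Append a constant 1 to the feature column so the bias term
        # can live inside the weight vector
        return np.vstack([x_, np.ones((1, x_.shape[1]))])

    def mapper(self, y):
        # Remember the original labels and map them to {-1, +1}
        # (assumes a binary classification problem)
        self.classes = np.unique(y)
        return np.where(y == self.classes[1], 1, -1)

    def predict(self, x_):
        # Sign of the affine score w^T [x; 1]; the bias entry is appended
        # here because fit() passes the raw feature column
        x_ = Perceptron.concat_ones(x_)
        return 1 if (self.w.T @ x_).item() >= 0 else -1

With the fit method from Example #1 attached to this class, a call such as Perceptron(n_features=2, lr=0.1).fit(train_x, train_y, max_epochs=100) returns the per-epoch training scores (empty here, since the logging lines are commented out) and the history of weight vectors.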
Example #2
    def score(self, x, y):
        """Return the mean accuracy of the predictions on (x, y)."""
        x = Perceptron.concat_ones(x)  # append the bias term to every sample
        preds_val = np.array([self.predict(x[i].reshape(-1, 1)) for i in range(x.shape[0])])
        preds_val = self.classes[preds_val]  # map predictions back to the original class labels
        return np.mean(preds_val == y)
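
score makes one predict call per sample inside a Python loop. Assuming the decision rule is the sign of the augmented dot product, as in the weight update of Example #1, and that concat_ones appends a bias column to the 2-D design matrix, the same accuracy can be computed in a single vectorized pass; this is a sketch of an alternative, not the repository's code.

    def score_vectorized(self, x, y):
        # Same accuracy as score() above, without the per-sample loop.
        # Assumes concat_ones appends a bias column to the 2-D design matrix
        # and that self.classes maps a {-1, +1} prediction back to its label.
        x_aug = Perceptron.concat_ones(x)
        preds = np.where(x_aug @ self.w >= 0, 1, -1).ravel()
        return np.mean(self.classes[preds] == y)

A single matrix product replaces the n reshape-and-predict calls, which is the usual way to evaluate a linear classifier on a whole batch.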